[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use the following functionality (see the usage sketch after this comment):
27
28 - finalize_function
29
30 This function is called once the front-end has parsed the whole body of the function
31 and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has the same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Inserts a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
47 This function is called once the (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 the final assembly is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done, e.g. to discover
81 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across the whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent the whole program in memory.
97
98 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and/or linktime analysis stage (WPA):
102
103 At linktime the units get streamed back and the symbol table is
104 merged. Function bodies are not streamed in and are not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on the earlier
129 decisions of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within a single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that need to be output into
139 assembler are identified and compiled in topological order.
140 6) Output of variables and aliases
141 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 the backend.
149 - cgraph_add_new_function is used to add backend-produced
150 functions introduced after the unit is finalized.
151 The functions are enqueued for later processing and inserted
152 into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of a function into a new one (a version)
157 and applies simple transformations.
158 */
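/* Editor's illustrative sketch (not part of the original driver): a
   hypothetical frontend would hand its parsed declarations to this module
   roughly as follows.  The frontend_parse_* helpers are made-up names used
   only for illustration; the cgraph/varpool entry points are the ones
   documented above.  */
#if 0
static void
hypothetical_frontend_compile_unit (void)
{
  tree decl;

  /* Hand over each function body as soon as it is fully parsed and can
     no longer change.  */
  while ((decl = frontend_parse_next_function ()) != NULL_TREE)
    cgraph_node::finalize_function (decl, /*no_collect=*/false);

  /* Static variables are finalized the same way.  */
  while ((decl = frontend_parse_next_static_variable ()) != NULL_TREE)
    varpool_node::finalize_decl (decl);

  /* Once the whole (source level) compilation unit is final, build and
     analyze the symbol table and pass control to the backend.  */
  symtab->finalize_compilation_unit ();
}
#endif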
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "basic-block.h"
171 #include "tree-ssa-alias.h"
172 #include "internal-fn.h"
173 #include "gimple-fold.h"
174 #include "gimple-expr.h"
175 #include "is-a.h"
176 #include "gimple.h"
177 #include "gimplify.h"
178 #include "gimple-iterator.h"
179 #include "gimplify-me.h"
180 #include "gimple-ssa.h"
181 #include "tree-cfg.h"
182 #include "tree-into-ssa.h"
183 #include "tree-ssa.h"
184 #include "tree-inline.h"
185 #include "langhooks.h"
186 #include "toplev.h"
187 #include "flags.h"
188 #include "debug.h"
189 #include "target.h"
190 #include "diagnostic.h"
191 #include "params.h"
192 #include "fibheap.h"
193 #include "intl.h"
194 #include "hashtab.h"
195 #include "hash-set.h"
196 #include "vec.h"
197 #include "machmode.h"
198 #include "hard-reg-set.h"
199 #include "input.h"
200 #include "function.h"
201 #include "ipa-prop.h"
202 #include "tree-iterator.h"
203 #include "tree-pass.h"
204 #include "tree-dump.h"
205 #include "gimple-pretty-print.h"
206 #include "output.h"
207 #include "coverage.h"
208 #include "plugin.h"
209 #include "ipa-inline.h"
210 #include "ipa-utils.h"
211 #include "lto-streamer.h"
212 #include "except.h"
213 #include "cfgloop.h"
214 #include "regset.h" /* FIXME: For reg_obstack. */
215 #include "context.h"
216 #include "pass_manager.h"
217 #include "tree-nested.h"
218 #include "gimplify.h"
219 #include "dbgcnt.h"
220
221 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
222 secondary queue used during optimization to accommodate passes that
223 may generate new functions that need to be optimized and expanded. */
224 vec<cgraph_node *> cgraph_new_nodes;
225
226 static void expand_all_functions (void);
227 static void mark_functions_to_output (void);
228 static void handle_alias_pairs (void);
229
230 /* Used for vtable lookup in thunk adjusting. */
231 static GTY (()) tree vtable_entry_type;
232
233 /* Determine if a symbol declaration is needed. That is, visible to something
234 either outside this translation unit or to something magic in the system
235 configury. */
236 bool
237 symtab_node::needed_p (void)
238 {
239 /* Double check that no one has output the function into the assembly file
240 early. */
241 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
242 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
243
244 if (!definition)
245 return false;
246
247 if (DECL_EXTERNAL (decl))
248 return false;
249
250 /* If the user told us it is used, then it must be so. */
251 if (force_output)
252 return true;
253
254 /* ABI forced symbols are needed when they are external. */
255 if (forced_by_abi && TREE_PUBLIC (decl))
256 return true;
257
258 /* Keep constructors, destructors and virtual functions. */
259 if (TREE_CODE (decl) == FUNCTION_DECL
260 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
261 return true;
262
263 /* Externally visible variables must be output. The exception is
264 COMDAT variables that must be output only when they are needed. */
265 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
266 return true;
267
268 return false;
269 }
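/* Editor's illustrative examples (made-up declarations, for illustration
   only) of source-level constructs that needed_p above treats as
   trivially needed.  */
#if 0
int exported_var = 1;		/* TREE_PUBLIC and not COMDAT.  */
__attribute__ ((used))
static int kept_var;		/* DECL_PRESERVE_P forces output.  */
static __attribute__ ((constructor))
void run_early (void) { }	/* Static constructors are kept.  */
/* A plain unused "static int helper;" is not needed and is reclaimed
   once the unit is analyzed.  */
#endif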
270
271 /* Head and terminator of the queue of nodes to be processed while building
272 callgraph. */
273
274 static symtab_node symtab_terminator;
275 static symtab_node *queued_nodes = &symtab_terminator;
276
277 /* Add NODE to the queue starting at QUEUED_NODES.
278 The queue is linked via AUX pointers and terminated by a pointer to SYMTAB_TERMINATOR. */
279
280 static void
281 enqueue_node (symtab_node *node)
282 {
283 if (node->aux)
284 return;
285 gcc_checking_assert (queued_nodes);
286 node->aux = queued_nodes;
287 queued_nodes = node;
288 }
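/* Editor's note: the queue built by enqueue_node above is drained by
   walking the AUX chain until the terminator is reached, as the sketch
   below shows; analyze_functions later in this file does exactly this.  */
#if 0
static void
drain_queued_nodes_sketch (void)
{
  while (queued_nodes != &symtab_terminator)
    {
      symtab_node *node = queued_nodes;
      queued_nodes = (symtab_node *) queued_nodes->aux;
      /* Process NODE here.  NODE->aux stays non-NULL, so enqueue_node
         will not add it a second time until aux is cleared.  */
    }
}
#endif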
289
290 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
291 functions into the callgraph in a way so they look like ordinary reachable
292 functions inserted into the callgraph already at construction time. */
293
294 void
295 symbol_table::process_new_functions (void)
296 {
297 tree fndecl;
298
299 if (!cgraph_new_nodes.exists ())
300 return;
301
302 handle_alias_pairs ();
303 /* Note that this queue may grow as it is being processed, as the new
304 functions may generate new ones. */
305 for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
306 {
307 cgraph_node *node = cgraph_new_nodes[i];
308 fndecl = node->decl;
309 switch (state)
310 {
311 case CONSTRUCTION:
312 /* At construction time we just need to finalize the function and move
313 it into the reachable functions list. */
314
315 cgraph_node::finalize_function (fndecl, false);
316 call_cgraph_insertion_hooks (node);
317 enqueue_node (node);
318 break;
319
320 case IPA:
321 case IPA_SSA:
322 /* When IPA optimization has already started, do all essential
323 transformations that have already been performed on the whole
324 cgraph but not on this function. */
325
326 gimple_register_cfg_hooks ();
327 if (!node->analyzed)
328 node->analyze ();
329 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
330 if (state == IPA_SSA
331 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
332 g->get_passes ()->execute_early_local_passes ();
333 else if (inline_summary_vec != NULL)
334 compute_inline_parameters (node, true);
335 free_dominance_info (CDI_POST_DOMINATORS);
336 free_dominance_info (CDI_DOMINATORS);
337 pop_cfun ();
338 call_cgraph_insertion_hooks (node);
339 break;
340
341 case EXPANSION:
342 /* Functions created during expansion shall be compiled
343 directly. */
344 node->process = 0;
345 call_cgraph_insertion_hooks (node);
346 node->expand ();
347 break;
348
349 default:
350 gcc_unreachable ();
351 break;
352 }
353 }
354
355 cgraph_new_nodes.release ();
356 }
357
358 /* As a GCC extension we allow redefinition of the function. The
359 semantics when the two bodies differ are not well defined.
360 We replace the old body with the new body, so in unit-at-a-time mode
361 we always use the new body, while in normal mode we may end up with
362 the old body inlined into some functions and the new body expanded and
363 inlined in others.
364
365 ??? It may make more sense to use one body for inlining and the other
366 body for expanding the function but this is difficult to do. */
367
368 void
369 cgraph_node::reset (void)
370 {
371 /* If process is set, then we have already begun whole-unit analysis.
372 This is *not* testing for whether we've already emitted the function.
373 That case can be sort-of legitimately seen with real function redefinition
374 errors. I would argue that the front end should never present us with
375 such a case, but don't enforce that for now. */
376 gcc_assert (!process);
377
378 /* Reset our data structures so we can analyze the function again. */
379 memset (&local, 0, sizeof (local));
380 memset (&global, 0, sizeof (global));
381 memset (&rtl, 0, sizeof (rtl));
382 analyzed = false;
383 definition = false;
384 alias = false;
385 weakref = false;
386 cpp_implicit_alias = false;
387
388 remove_callees ();
389 remove_all_references ();
390 }
391
392 /* Return true when there are references to the node. */
393
394 bool
395 symtab_node::referred_to_p (void)
396 {
397 ipa_ref *ref = NULL;
398
399 /* See if there are any references at all. */
400 if (iterate_referring (0, ref))
401 return true;
402 /* For functions check also calls. */
403 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
404 if (cn && cn->callers)
405 return true;
406 return false;
407 }
408
409 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
410 logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
411 the garbage collector run at the moment. We would need to either create
412 a new GC context, or just not compile right now. */
413
414 void
415 cgraph_node::finalize_function (tree decl, bool no_collect)
416 {
417 cgraph_node *node = cgraph_node::get_create (decl);
418
419 if (node->definition)
420 {
421 /* Nested functions should only be defined once. */
422 gcc_assert (!DECL_CONTEXT (decl)
423 || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
424 node->reset ();
425 node->local.redefined_extern_inline = true;
426 }
427
428 notice_global_symbol (decl);
429 node->definition = true;
430 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
431
432 /* With -fkeep-inline-functions we are keeping all inline functions except
433 for extern inline ones. */
434 if (flag_keep_inline_functions
435 && DECL_DECLARED_INLINE_P (decl)
436 && !DECL_EXTERNAL (decl)
437 && !DECL_DISREGARD_INLINE_LIMITS (decl))
438 node->force_output = 1;
439
440 /* When not optimizing, also output the static functions (see
441 PR24561), but don't do so for always_inline functions, functions
442 declared inline, and nested functions. These were optimized out
443 in the original implementation and it is unclear whether we want
444 to change the behavior here. */
445 if ((!optimize
446 && !node->cpp_implicit_alias
447 && !DECL_DISREGARD_INLINE_LIMITS (decl)
448 && !DECL_DECLARED_INLINE_P (decl)
449 && !(DECL_CONTEXT (decl)
450 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
451 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
452 node->force_output = 1;
453
454 /* If we've not yet emitted decl, tell the debug info about it. */
455 if (!TREE_ASM_WRITTEN (decl))
456 (*debug_hooks->deferred_inline_function) (decl);
457
458 /* Possibly warn about unused parameters. */
459 if (warn_unused_parameter)
460 do_warn_unused_parameter (decl);
461
462 if (!no_collect)
463 ggc_collect ();
464
465 if (symtab->state == CONSTRUCTION
466 && (node->needed_p () || node->referred_to_p ()))
467 enqueue_node (node);
468 }
469
470 /* Add the function FNDECL to the call graph.
471 Unlike finalize_function, this function is intended to be used
472 by the middle end and allows insertion of a new function at an arbitrary
473 point of compilation. The function can be either in high, low or SSA form
474 GIMPLE.
475
476 The function is assumed to be reachable and to have its address taken (so no
477 API breaking optimizations are performed on it).
478
479 The main work done by this function is to enqueue the function for later
480 processing to avoid the need for the passes to be re-entrant. */
481
482 void
483 cgraph_node::add_new_function (tree fndecl, bool lowered)
484 {
485 gcc::pass_manager *passes = g->get_passes ();
486 cgraph_node *node;
487 switch (symtab->state)
488 {
489 case PARSING:
490 cgraph_node::finalize_function (fndecl, false);
491 break;
492 case CONSTRUCTION:
493 /* Just enqueue function to be processed at nearest occurrence. */
494 node = cgraph_node::get_create (fndecl);
495 if (lowered)
496 node->lowered = true;
497 cgraph_new_nodes.safe_push (node);
498 break;
499
500 case IPA:
501 case IPA_SSA:
502 case EXPANSION:
503 /* Bring the function into finalized state and enqueue for later
504 analysis and compilation. */
505 node = cgraph_node::get_create (fndecl);
506 node->local.local = false;
507 node->definition = true;
508 node->force_output = true;
509 if (!lowered && symtab->state == EXPANSION)
510 {
511 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
512 gimple_register_cfg_hooks ();
513 bitmap_obstack_initialize (NULL);
514 execute_pass_list (cfun, passes->all_lowering_passes);
515 passes->execute_early_local_passes ();
516 bitmap_obstack_release (NULL);
517 pop_cfun ();
518
519 lowered = true;
520 }
521 if (lowered)
522 node->lowered = true;
523 cgraph_new_nodes.safe_push (node);
524 break;
525
526 case FINISHED:
527 /* At the very end of compilation we have to do all the work up
528 to expansion. */
529 node = cgraph_node::create (fndecl);
530 if (lowered)
531 node->lowered = true;
532 node->definition = true;
533 node->analyze ();
534 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
535 gimple_register_cfg_hooks ();
536 bitmap_obstack_initialize (NULL);
537 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
538 g->get_passes ()->execute_early_local_passes ();
539 bitmap_obstack_release (NULL);
540 pop_cfun ();
541 node->expand ();
542 break;
543
544 default:
545 gcc_unreachable ();
546 }
547
548 /* Set a personality if required and we already passed EH lowering. */
549 if (lowered
550 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
551 == eh_personality_lang))
552 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
553 }
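/* Editor's illustrative sketch: a middle-end pass or backend that has just
   built a new FUNCTION_DECL (decl construction elided) registers it with
   the call graph like this; the helper name is made up.  */
#if 0
static void
register_generated_function_sketch (tree new_fndecl, bool body_is_lowered)
{
  /* The new function is treated as reachable with its address taken and is
     queued, so the currently running pass does not have to be re-entrant.  */
  cgraph_node::add_new_function (new_fndecl, body_is_lowered);
}
#endif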
554
555 /* Analyze the function scheduled to be output. */
556 void
557 cgraph_node::analyze (void)
558 {
559 tree decl = this->decl;
560 location_t saved_loc = input_location;
561 input_location = DECL_SOURCE_LOCATION (decl);
562
563 if (thunk.thunk_p)
564 {
565 create_edge (cgraph_node::get (thunk.alias),
566 NULL, 0, CGRAPH_FREQ_BASE);
567 if (!expand_thunk (false, false))
568 {
569 thunk.alias = NULL;
570 return;
571 }
572 thunk.alias = NULL;
573 }
574 if (alias)
575 resolve_alias (cgraph_node::get (alias_target));
576 else if (dispatcher_function)
577 {
578 /* Generate the dispatcher body of multi-versioned functions. */
579 cgraph_function_version_info *dispatcher_version_info
580 = function_version ();
581 if (dispatcher_version_info != NULL
582 && (dispatcher_version_info->dispatcher_resolver
583 == NULL_TREE))
584 {
585 tree resolver = NULL_TREE;
586 gcc_assert (targetm.generate_version_dispatcher_body);
587 resolver = targetm.generate_version_dispatcher_body (this);
588 gcc_assert (resolver != NULL_TREE);
589 }
590 }
591 else
592 {
593 push_cfun (DECL_STRUCT_FUNCTION (decl));
594
595 assign_assembler_name_if_neeeded (decl);
596
597 /* Make sure to gimplify bodies only once. While analyzing a
598 function we lower it, which will require gimplified nested
599 functions, so we can end up here with an already gimplified
600 body. */
601 if (!gimple_has_body_p (decl))
602 gimplify_function_tree (decl);
603 dump_function (TDI_generic, decl);
604
605 /* Lower the function. */
606 if (!lowered)
607 {
608 if (nested)
609 lower_nested_functions (decl);
610 gcc_assert (!nested);
611
612 gimple_register_cfg_hooks ();
613 bitmap_obstack_initialize (NULL);
614 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
615 free_dominance_info (CDI_POST_DOMINATORS);
616 free_dominance_info (CDI_DOMINATORS);
617 compact_blocks ();
618 bitmap_obstack_release (NULL);
619 lowered = true;
620 }
621
622 pop_cfun ();
623 }
624 analyzed = true;
625
626 input_location = saved_loc;
627 }
628
629 /* The C++ frontend produces same-body aliases all over the place, even before
630 PCH gets streamed out. It relies on us linking the aliases with their
631 functions in order to do the fixups, but ipa-ref is not PCH safe.
632 Consequently we first produce aliases without links, but once the C++ FE is
633 sure it won't stream out PCH we build the links via this function. */
634
635 void
636 symbol_table::process_same_body_aliases (void)
637 {
638 symtab_node *node;
639 FOR_EACH_SYMBOL (node)
640 if (node->cpp_implicit_alias && !node->analyzed)
641 node->resolve_alias
642 (TREE_CODE (node->alias_target) == VAR_DECL
643 ? (symtab_node *)varpool_node::get_create (node->alias_target)
644 : (symtab_node *)cgraph_node::get_create (node->alias_target));
645 cpp_implicit_aliases_done = true;
646 }
647
648 /* Process attributes common for vars and functions. */
649
650 static void
651 process_common_attributes (symtab_node *node, tree decl)
652 {
653 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
654
655 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
656 {
657 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
658 "%<weakref%> attribute should be accompanied with"
659 " an %<alias%> attribute");
660 DECL_WEAK (decl) = 0;
661 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
662 DECL_ATTRIBUTES (decl));
663 }
664
665 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
666 node->no_reorder = 1;
667 }
668
669 /* Look for externally_visible and used attributes and mark cgraph nodes
670 accordingly.
671
672 We cannot mark the nodes at the point the attributes are processed (in
673 handle_*_attribute) because the copy of the declarations available at that
674 point may not be canonical. For example, in:
675
676 void f();
677 void f() __attribute__((used));
678
679 the declaration we see in handle_used_attribute will be the second
680 declaration -- but the front end will subsequently merge that declaration
681 with the original declaration and discard the second declaration.
682
683 Furthermore, we can't mark these nodes in finalize_function because:
684
685 void f() {}
686 void f() __attribute__((externally_visible));
687
688 is valid.
689
690 So, we walk the nodes at the end of the translation unit, applying the
691 attributes at that point. */
692
693 static void
694 process_function_and_variable_attributes (cgraph_node *first,
695 varpool_node *first_var)
696 {
697 cgraph_node *node;
698 varpool_node *vnode;
699
700 for (node = symtab->first_function (); node != first;
701 node = symtab->next_function (node))
702 {
703 tree decl = node->decl;
704 if (DECL_PRESERVE_P (decl))
705 node->mark_force_output ();
706 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
707 {
708 if (! TREE_PUBLIC (node->decl))
709 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
710 "%<externally_visible%>"
711 " attribute have effect only on public objects");
712 }
713 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
714 && (node->definition && !node->alias))
715 {
716 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
717 "%<weakref%> attribute ignored"
718 " because function is defined");
719 DECL_WEAK (decl) = 0;
720 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
721 DECL_ATTRIBUTES (decl));
722 }
723
724 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
725 && !DECL_DECLARED_INLINE_P (decl)
726 /* redefining extern inline function makes it DECL_UNINLINABLE. */
727 && !DECL_UNINLINABLE (decl))
728 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
729 "always_inline function might not be inlinable");
730
731 process_common_attributes (node, decl);
732 }
733 for (vnode = symtab->first_variable (); vnode != first_var;
734 vnode = symtab->next_variable (vnode))
735 {
736 tree decl = vnode->decl;
737 if (DECL_EXTERNAL (decl)
738 && DECL_INITIAL (decl))
739 varpool_node::finalize_decl (decl);
740 if (DECL_PRESERVE_P (decl))
741 vnode->force_output = true;
742 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
743 {
744 if (! TREE_PUBLIC (vnode->decl))
745 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
746 "%<externally_visible%>"
747 " attribute have effect only on public objects");
748 }
749 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
750 && vnode->definition
751 && DECL_INITIAL (decl))
752 {
753 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
754 "%<weakref%> attribute ignored"
755 " because variable is initialized");
756 DECL_WEAK (decl) = 0;
757 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
758 DECL_ATTRIBUTES (decl));
759 }
760 process_common_attributes (vnode, decl);
761 }
762 }
763
764 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
765 the middle end to output the variable to the asm file, if it is needed or
766 externally visible. */
767
768 void
769 varpool_node::finalize_decl (tree decl)
770 {
771 varpool_node *node = varpool_node::get_create (decl);
772
773 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
774
775 if (node->definition)
776 return;
777 notice_global_symbol (decl);
778 node->definition = true;
779 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
780 /* Traditionally we do not eliminate static variables when not
781 optimizing and when not doing toplevel reorder. */
782 || node->no_reorder
783 || ((!flag_toplevel_reorder
784 && !DECL_COMDAT (node->decl)
785 && !DECL_ARTIFICIAL (node->decl))))
786 node->force_output = true;
787
788 if (symtab->state == CONSTRUCTION
789 && (node->needed_p () || node->referred_to_p ()))
790 enqueue_node (node);
791 if (symtab->state >= IPA_SSA)
792 node->analyze ();
793 /* Some frontends produce various interface variables after compilation
794 has finished. */
795 if (symtab->state == FINISHED
796 || (!flag_toplevel_reorder
797 && symtab->state == EXPANSION))
798 node->assemble_decl ();
799 }
800
801 /* EDGE is a polymorphic call. Mark all possible targets as reachable
802 and, if there is only one target, perform trivial devirtualization.
803 REACHABLE_CALL_TARGETS collects target lists we already walked to
804 avoid duplicate work. */
805
806 static void
807 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
808 cgraph_edge *edge)
809 {
810 unsigned int i;
811 void *cache_token;
812 bool final;
813 vec <cgraph_node *>targets
814 = possible_polymorphic_call_targets
815 (edge, &final, &cache_token);
816
817 if (!reachable_call_targets->add (cache_token))
818 {
819 if (symtab->dump_file)
820 dump_possible_polymorphic_call_targets
821 (symtab->dump_file, edge);
822
823 for (i = 0; i < targets.length (); i++)
824 {
825 /* Do not bother to mark virtual methods in an anonymous namespace;
826 either we will find a use of the virtual table defining it, or it is
827 unused. */
828 if (targets[i]->definition
829 && TREE_CODE
830 (TREE_TYPE (targets[i]->decl))
831 == METHOD_TYPE
832 && !type_in_anonymous_namespace_p
833 (method_class_type
834 (TREE_TYPE (targets[i]->decl))))
835 enqueue_node (targets[i]);
836 }
837 }
838
839 /* Very trivial devirtualization; when the type is
840 final or anonymous (so we know all of its derived types)
841 and there is only one possible virtual call target,
842 make the edge direct. */
843 if (final)
844 {
845 if (targets.length () <= 1 && dbg_cnt (devirt))
846 {
847 cgraph_node *target;
848 if (targets.length () == 1)
849 target = targets[0];
850 else
851 target = cgraph_node::create
852 (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
853
854 if (symtab->dump_file)
855 {
856 fprintf (symtab->dump_file,
857 "Devirtualizing call: ");
858 print_gimple_stmt (symtab->dump_file,
859 edge->call_stmt, 0,
860 TDF_SLIM);
861 }
862 if (dump_enabled_p ())
863 {
864 location_t locus = gimple_location_safe (edge->call_stmt);
865 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
866 "devirtualizing call in %s to %s\n",
867 edge->caller->name (), target->name ());
868 }
869
870 edge->make_direct (target);
871 edge->redirect_call_stmt_to_callee ();
872 if (symtab->dump_file)
873 {
874 fprintf (symtab->dump_file,
875 "Devirtualized as: ");
876 print_gimple_stmt (symtab->dump_file,
877 edge->call_stmt, 0,
878 TDF_SLIM);
879 }
880 }
881 }
882 }
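/* Editor's illustrative source-level example of the devirtualization
   performed above: the anonymous namespace guarantees that every derived
   type is visible, so the indirect call has one possible target and the
   edge can be made direct.  */
#if 0
namespace
{
  struct S
  {
    virtual int f () { return 1; }
  };
}

int
call_f (S *p)
{
  return p->f ();	/* With -fdevirtualize this becomes a direct call
			   to S::f (or to __builtin_unreachable if no
			   target can exist).  */
}
#endif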
883
884
885 /* Discover all functions and variables that are trivially needed, analyze
886 them as well as all functions and variables referred to by them. */
887 static cgraph_node *first_analyzed;
888 static varpool_node *first_analyzed_var;
889
890 static void
891 analyze_functions (void)
892 {
893 /* Keep track of already processed nodes when called multiple times for
894 intermodule optimization. */
895 cgraph_node *first_handled = first_analyzed;
896 varpool_node *first_handled_var = first_analyzed_var;
897 hash_set<void *> reachable_call_targets;
898
899 symtab_node *node;
900 symtab_node *next;
901 int i;
902 ipa_ref *ref;
903 bool changed = true;
904 location_t saved_loc = input_location;
905
906 bitmap_obstack_initialize (NULL);
907 symtab->state = CONSTRUCTION;
908 input_location = UNKNOWN_LOCATION;
909
910 /* Ugly, but the fixup cannot happen at the time the same-body alias is created;
911 the C++ FE is confused about the COMDAT groups being right. */
912 if (symtab->cpp_implicit_aliases_done)
913 FOR_EACH_SYMBOL (node)
914 if (node->cpp_implicit_alias)
915 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
916 if (optimize && flag_devirtualize)
917 build_type_inheritance_graph ();
918
919 /* Analysis adds static variables that in turn add references to new functions.
920 So we need to iterate the process until it stabilizes. */
921 while (changed)
922 {
923 changed = false;
924 process_function_and_variable_attributes (first_analyzed,
925 first_analyzed_var);
926
927 /* First identify the trivially needed symbols. */
928 for (node = symtab->first_symbol ();
929 node != first_analyzed
930 && node != first_analyzed_var; node = node->next)
931 {
932 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
933 node->get_comdat_group_id ();
934 if (node->needed_p ())
935 {
936 enqueue_node (node);
937 if (!changed && symtab->dump_file)
938 fprintf (symtab->dump_file, "Trivially needed symbols:");
939 changed = true;
940 if (symtab->dump_file)
941 fprintf (symtab->dump_file, " %s", node->asm_name ());
942 if (!changed && symtab->dump_file)
943 fprintf (symtab->dump_file, "\n");
944 }
945 if (node == first_analyzed
946 || node == first_analyzed_var)
947 break;
948 }
949 symtab->process_new_functions ();
950 first_analyzed_var = symtab->first_variable ();
951 first_analyzed = symtab->first_function ();
952
953 if (changed && symtab->dump_file)
954 fprintf (symtab->dump_file, "\n");
955
956 /* Lower the representation, build callgraph edges and references for all
957 trivially needed symbols and all symbols referred to by them. */
958 while (queued_nodes != &symtab_terminator)
959 {
960 changed = true;
961 node = queued_nodes;
962 queued_nodes = (symtab_node *)queued_nodes->aux;
963 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
964 if (cnode && cnode->definition)
965 {
966 cgraph_edge *edge;
967 tree decl = cnode->decl;
968
969 /* ??? It is possible to create an extern inline function
970 and later use the weak alias attribute to kill its body.
971 See gcc.c-torture/compile/20011119-1.c */
972 if (!DECL_STRUCT_FUNCTION (decl)
973 && !cnode->alias
974 && !cnode->thunk.thunk_p
975 && !cnode->dispatcher_function)
976 {
977 cnode->reset ();
978 cnode->local.redefined_extern_inline = true;
979 continue;
980 }
981
982 if (!cnode->analyzed)
983 cnode->analyze ();
984
985 for (edge = cnode->callees; edge; edge = edge->next_callee)
986 if (edge->callee->definition)
987 enqueue_node (edge->callee);
988 if (optimize && flag_devirtualize)
989 {
990 cgraph_edge *next;
991
992 for (edge = cnode->indirect_calls; edge; edge = next)
993 {
994 next = edge->next_callee;
995 if (edge->indirect_info->polymorphic)
996 walk_polymorphic_call_targets (&reachable_call_targets,
997 edge);
998 }
999 }
1000
1001 /* If decl is a clone of an abstract function,
1002 mark that abstract function so that we don't release its body.
1003 The DECL_INITIAL() of that abstract function declaration
1004 will later be needed to output debug info. */
1005 if (DECL_ABSTRACT_ORIGIN (decl))
1006 {
1007 cgraph_node *origin_node
1008 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1009 origin_node->used_as_abstract_origin = true;
1010 }
1011 }
1012 else
1013 {
1014 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1015 if (vnode && vnode->definition && !vnode->analyzed)
1016 vnode->analyze ();
1017 }
1018
1019 if (node->same_comdat_group)
1020 {
1021 symtab_node *next;
1022 for (next = node->same_comdat_group;
1023 next != node;
1024 next = next->same_comdat_group)
1025 enqueue_node (next);
1026 }
1027 for (i = 0; node->iterate_reference (i, ref); i++)
1028 if (ref->referred->definition)
1029 enqueue_node (ref->referred);
1030 symtab->process_new_functions ();
1031 }
1032 }
1033 if (optimize && flag_devirtualize)
1034 update_type_inheritance_graph ();
1035
1036 /* Collect entry points to the unit. */
1037 if (symtab->dump_file)
1038 {
1039 fprintf (symtab->dump_file, "\n\nInitial ");
1040 symtab_node::dump_table (symtab->dump_file);
1041 }
1042
1043 if (symtab->dump_file)
1044 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1045
1046 for (node = symtab->first_symbol ();
1047 node != first_handled
1048 && node != first_handled_var; node = next)
1049 {
1050 next = node->next;
1051 if (!node->aux && !node->referred_to_p ())
1052 {
1053 if (symtab->dump_file)
1054 fprintf (symtab->dump_file, " %s", node->name ());
1055 node->remove ();
1056 continue;
1057 }
1058 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1059 {
1060 tree decl = node->decl;
1061
1062 if (cnode->definition && !gimple_has_body_p (decl)
1063 && !cnode->alias
1064 && !cnode->thunk.thunk_p)
1065 cnode->reset ();
1066
1067 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1068 || cnode->alias
1069 || gimple_has_body_p (decl));
1070 gcc_assert (cnode->analyzed == cnode->definition);
1071 }
1072 node->aux = NULL;
1073 }
1074 for (;node; node = node->next)
1075 node->aux = NULL;
1076 first_analyzed = symtab->first_function ();
1077 first_analyzed_var = symtab->first_variable ();
1078 if (symtab->dump_file)
1079 {
1080 fprintf (symtab->dump_file, "\n\nReclaimed ");
1081 symtab_node::dump_table (symtab->dump_file);
1082 }
1083 bitmap_obstack_release (NULL);
1084 ggc_collect ();
1085 /* Initialize the assembler name hash; in particular we want to trigger C++
1086 mangling and same-body alias creation before we free the DECL_ARGUMENTS
1087 used by it. */
1088 if (!seen_error ())
1089 symtab->symtab_initialize_asm_name_hash ();
1090
1091 input_location = saved_loc;
1092 }
1093
1094 /* Translate the ugly representation of aliases as alias pairs into a nice
1095 representation in the callgraph. We don't handle all cases yet,
1096 unfortunately. */
1097
1098 static void
1099 handle_alias_pairs (void)
1100 {
1101 alias_pair *p;
1102 unsigned i;
1103
1104 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1105 {
1106 symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1107
1108 /* Weakrefs with a target not defined in the current unit are easy to handle:
1109 they behave just like external variables except that we need to note the
1110 alias flag to later output the weakref pseudo-op into the asm file. */
1111 if (!target_node
1112 && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1113 {
1114 symtab_node *node = symtab_node::get (p->decl);
1115 if (node)
1116 {
1117 node->alias_target = p->target;
1118 node->weakref = true;
1119 node->alias = true;
1120 }
1121 alias_pairs->unordered_remove (i);
1122 continue;
1123 }
1124 else if (!target_node)
1125 {
1126 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1127 symtab_node *node = symtab_node::get (p->decl);
1128 if (node)
1129 node->alias = false;
1130 alias_pairs->unordered_remove (i);
1131 continue;
1132 }
1133
1134 if (DECL_EXTERNAL (target_node->decl)
1135 /* We use local aliases for C++ thunks to force the tailcall
1136 to bind locally. This is a hack - to keep it working do
1137 the following (which is not strictly correct). */
1138 && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1139 || ! DECL_VIRTUAL_P (target_node->decl))
1140 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1141 {
1142 error ("%q+D aliased to external symbol %qE",
1143 p->decl, p->target);
1144 }
1145
1146 if (TREE_CODE (p->decl) == FUNCTION_DECL
1147 && target_node && is_a <cgraph_node *> (target_node))
1148 {
1149 cgraph_node *src_node = cgraph_node::get (p->decl);
1150 if (src_node && src_node->definition)
1151 src_node->reset ();
1152 cgraph_node::create_alias (p->decl, target_node->decl);
1153 alias_pairs->unordered_remove (i);
1154 }
1155 else if (TREE_CODE (p->decl) == VAR_DECL
1156 && target_node && is_a <varpool_node *> (target_node))
1157 {
1158 varpool_node::create_alias (p->decl, target_node->decl);
1159 alias_pairs->unordered_remove (i);
1160 }
1161 else
1162 {
1163 error ("%q+D alias in between function and variable is not supported",
1164 p->decl);
1165 warning (0, "%q+D aliased declaration",
1166 target_node->decl);
1167 alias_pairs->unordered_remove (i);
1168 }
1169 }
1170 vec_free (alias_pairs);
1171 }
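/* Editor's illustrative source-level examples of the alias pairs handled
   above (the names are made up):

     int impl (void) { return 0; }
     int wrapper (void) __attribute__ ((alias ("impl")));
     static int maybe (void) __attribute__ ((weakref ("external_impl")));

   The first pair becomes a cgraph alias of "impl"; the weakref whose target
   is not defined in this unit merely records alias_target and the weakref
   flag so the weakref pseudo-op can be emitted later.  */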
1172
1173
1174 /* Figure out what functions we want to assemble. */
1175
1176 static void
1177 mark_functions_to_output (void)
1178 {
1179 cgraph_node *node;
1180 #ifdef ENABLE_CHECKING
1181 bool check_same_comdat_groups = false;
1182
1183 FOR_EACH_FUNCTION (node)
1184 gcc_assert (!node->process);
1185 #endif
1186
1187 FOR_EACH_FUNCTION (node)
1188 {
1189 tree decl = node->decl;
1190
1191 gcc_assert (!node->process || node->same_comdat_group);
1192 if (node->process)
1193 continue;
1194
1195 /* We need to output all local functions that are used and not
1196 always inlined, as well as those that are reachable from
1197 outside the current compilation unit. */
1198 if (node->analyzed
1199 && !node->thunk.thunk_p
1200 && !node->alias
1201 && !node->global.inlined_to
1202 && !TREE_ASM_WRITTEN (decl)
1203 && !DECL_EXTERNAL (decl))
1204 {
1205 node->process = 1;
1206 if (node->same_comdat_group)
1207 {
1208 cgraph_node *next;
1209 for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1210 next != node;
1211 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1212 if (!next->thunk.thunk_p && !next->alias
1213 && !next->comdat_local_p ())
1214 next->process = 1;
1215 }
1216 }
1217 else if (node->same_comdat_group)
1218 {
1219 #ifdef ENABLE_CHECKING
1220 check_same_comdat_groups = true;
1221 #endif
1222 }
1223 else
1224 {
1225 /* We should've reclaimed all functions that are not needed. */
1226 #ifdef ENABLE_CHECKING
1227 if (!node->global.inlined_to
1228 && gimple_has_body_p (decl)
1229 /* FIXME: in an ltrans unit when the offline copy is outside a partition
1230 but inline copies are inside a partition, we can end up not removing
1231 the body since we no longer have an analyzed node pointing to it. */
1232 && !node->in_other_partition
1233 && !node->alias
1234 && !node->clones
1235 && !DECL_EXTERNAL (decl))
1236 {
1237 node->debug ();
1238 internal_error ("failed to reclaim unneeded function");
1239 }
1240 #endif
1241 gcc_assert (node->global.inlined_to
1242 || !gimple_has_body_p (decl)
1243 || node->in_other_partition
1244 || node->clones
1245 || DECL_ARTIFICIAL (decl)
1246 || DECL_EXTERNAL (decl));
1247
1248 }
1249
1250 }
1251 #ifdef ENABLE_CHECKING
1252 if (check_same_comdat_groups)
1253 FOR_EACH_FUNCTION (node)
1254 if (node->same_comdat_group && !node->process)
1255 {
1256 tree decl = node->decl;
1257 if (!node->global.inlined_to
1258 && gimple_has_body_p (decl)
1259 /* FIXME: in an ltrans unit when the offline copy is outside a
1260 partition but inline copies are inside a partition, we can
1261 end up not removing the body since we no longer have an
1262 analyzed node pointing to it. */
1263 && !node->in_other_partition
1264 && !node->clones
1265 && !DECL_EXTERNAL (decl))
1266 {
1267 node->debug ();
1268 internal_error ("failed to reclaim unneeded function in same "
1269 "comdat group");
1270 }
1271 }
1272 #endif
1273 }
1274
1275 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1276 in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA.
1277
1278 Set current_function_decl and cfun to the newly constructed empty function body.
1279 Return the basic block in the function body. */
1280
1281 basic_block
1282 init_lowered_empty_function (tree decl, bool in_ssa)
1283 {
1284 basic_block bb;
1285
1286 current_function_decl = decl;
1287 allocate_struct_function (decl, false);
1288 gimple_register_cfg_hooks ();
1289 init_empty_tree_cfg ();
1290
1291 if (in_ssa)
1292 {
1293 init_tree_ssa (cfun);
1294 init_ssa_operands (cfun);
1295 cfun->gimple_df->in_ssa_p = true;
1296 cfun->curr_properties |= PROP_ssa;
1297 }
1298
1299 DECL_INITIAL (decl) = make_node (BLOCK);
1300
1301 DECL_SAVED_TREE (decl) = error_mark_node;
1302 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1303 | PROP_cfg | PROP_loops);
1304
1305 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1306 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1307 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1308
1309 /* Create BB for body of the function and connect it properly. */
1310 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1311 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1312 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1313 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1314
1315 return bb;
1316 }
1317
1318 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1319 offset indicated by VIRTUAL_OFFSET, if that is
1320 non-null. THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1321 zero for a result-adjusting thunk. */
1322
1323 static tree
1324 thunk_adjust (gimple_stmt_iterator * bsi,
1325 tree ptr, bool this_adjusting,
1326 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1327 {
1328 gimple stmt;
1329 tree ret;
1330
1331 if (this_adjusting
1332 && fixed_offset != 0)
1333 {
1334 stmt = gimple_build_assign
1335 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1336 ptr,
1337 fixed_offset));
1338 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1339 }
1340
1341 /* If there's a virtual offset, look up that value in the vtable and
1342 adjust the pointer again. */
1343 if (virtual_offset)
1344 {
1345 tree vtabletmp;
1346 tree vtabletmp2;
1347 tree vtabletmp3;
1348
1349 if (!vtable_entry_type)
1350 {
1351 tree vfunc_type = make_node (FUNCTION_TYPE);
1352 TREE_TYPE (vfunc_type) = integer_type_node;
1353 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1354 layout_type (vfunc_type);
1355
1356 vtable_entry_type = build_pointer_type (vfunc_type);
1357 }
1358
1359 vtabletmp =
1360 create_tmp_reg (build_pointer_type
1361 (build_pointer_type (vtable_entry_type)), "vptr");
1362
1363 /* The vptr is always at offset zero in the object. */
1364 stmt = gimple_build_assign (vtabletmp,
1365 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1366 ptr));
1367 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1368
1369 /* Form the vtable address. */
1370 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1371 "vtableaddr");
1372 stmt = gimple_build_assign (vtabletmp2,
1373 build_simple_mem_ref (vtabletmp));
1374 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1375
1376 /* Find the entry with the vcall offset. */
1377 stmt = gimple_build_assign (vtabletmp2,
1378 fold_build_pointer_plus_loc (input_location,
1379 vtabletmp2,
1380 virtual_offset));
1381 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1382
1383 /* Get the offset itself. */
1384 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1385 "vcalloffset");
1386 stmt = gimple_build_assign (vtabletmp3,
1387 build_simple_mem_ref (vtabletmp2));
1388 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1389
1390 /* Adjust the `this' pointer. */
1391 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1392 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1393 GSI_CONTINUE_LINKING);
1394 }
1395
1396 if (!this_adjusting
1397 && fixed_offset != 0)
1398 /* Adjust the pointer by the constant. */
1399 {
1400 tree ptrtmp;
1401
1402 if (TREE_CODE (ptr) == VAR_DECL)
1403 ptrtmp = ptr;
1404 else
1405 {
1406 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1407 stmt = gimple_build_assign (ptrtmp, ptr);
1408 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1409 }
1410 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1411 ptrtmp, fixed_offset);
1412 }
1413
1414 /* Emit the statement and gimplify the adjustment expression. */
1415 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1416 stmt = gimple_build_assign (ret, ptr);
1417 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1418
1419 return ret;
1420 }
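/* Editor's illustrative sketch of the adjustment built above, written as
   plain C for the this-adjusting case (the result-adjusting case applies
   the constant offset after the vtable lookup instead of before it).  */
#if 0
static void *
thunk_adjust_sketch (void *ptr, long fixed_offset, const long *virtual_offset)
{
  char *p = (char *) ptr + fixed_offset;
  if (virtual_offset)
    {
      /* The vptr is at offset zero; the vtable slot at *VIRTUAL_OFFSET
         holds the additional vcall offset to add.  */
      char *vtable = *(char **) p;
      long vcall_offset = *(long *) (vtable + *virtual_offset);
      p += vcall_offset;
    }
  return p;
}
#endif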
1421
1422 /* Expand the thunk into gimple if possible.
1423 When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
1424 no assembler is produced.
1425 When OUTPUT_ASM_THUNKS is true, also produce assembler for
1426 thunks that are not lowered. */
1427
1428 bool
1429 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1430 {
1431 bool this_adjusting = thunk.this_adjusting;
1432 HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1433 HOST_WIDE_INT virtual_value = thunk.virtual_value;
1434 tree virtual_offset = NULL;
1435 tree alias = callees->callee->decl;
1436 tree thunk_fndecl = decl;
1437 tree a;
1438
1439
1440 if (!force_gimple_thunk && this_adjusting
1441 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1442 virtual_value, alias))
1443 {
1444 const char *fnname;
1445 tree fn_block;
1446 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1447
1448 if (!output_asm_thunks)
1449 {
1450 analyzed = true;
1451 return false;
1452 }
1453
1454 if (in_lto_p)
1455 get_body ();
1456 a = DECL_ARGUMENTS (thunk_fndecl);
1457
1458 current_function_decl = thunk_fndecl;
1459
1460 /* Ensure thunks are emitted in their correct sections. */
1461 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1462
1463 DECL_RESULT (thunk_fndecl)
1464 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1465 RESULT_DECL, 0, restype);
1466 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1467 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1468
1469 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1470 create one. */
1471 fn_block = make_node (BLOCK);
1472 BLOCK_VARS (fn_block) = a;
1473 DECL_INITIAL (thunk_fndecl) = fn_block;
1474 init_function_start (thunk_fndecl);
1475 cfun->is_thunk = 1;
1476 insn_locations_init ();
1477 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1478 prologue_location = curr_insn_location ();
1479 assemble_start_function (thunk_fndecl, fnname);
1480
1481 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1482 fixed_offset, virtual_value, alias);
1483
1484 assemble_end_function (thunk_fndecl, fnname);
1485 insn_locations_finalize ();
1486 init_insn_lengths ();
1487 free_after_compilation (cfun);
1488 set_cfun (NULL);
1489 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1490 thunk.thunk_p = false;
1491 analyzed = false;
1492 }
1493 else
1494 {
1495 tree restype;
1496 basic_block bb, then_bb, else_bb, return_bb;
1497 gimple_stmt_iterator bsi;
1498 int nargs = 0;
1499 tree arg;
1500 int i;
1501 tree resdecl;
1502 tree restmp = NULL;
1503
1504 gimple call;
1505 gimple ret;
1506
1507 if (in_lto_p)
1508 get_body ();
1509 a = DECL_ARGUMENTS (thunk_fndecl);
1510
1511 current_function_decl = thunk_fndecl;
1512
1513 /* Ensure thunks are emitted in their correct sections. */
1514 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1515
1516 DECL_IGNORED_P (thunk_fndecl) = 1;
1517 bitmap_obstack_initialize (NULL);
1518
1519 if (thunk.virtual_offset_p)
1520 virtual_offset = size_int (virtual_value);
1521
1522 /* Build the return declaration for the function. */
1523 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1524 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1525 {
1526 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1527 DECL_ARTIFICIAL (resdecl) = 1;
1528 DECL_IGNORED_P (resdecl) = 1;
1529 DECL_RESULT (thunk_fndecl) = resdecl;
1530 DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1531 }
1532 else
1533 resdecl = DECL_RESULT (thunk_fndecl);
1534
1535 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1536
1537 bsi = gsi_start_bb (bb);
1538
1539 /* Build call to the function being thunked. */
1540 if (!VOID_TYPE_P (restype))
1541 {
1542 if (DECL_BY_REFERENCE (resdecl))
1543 restmp = gimple_fold_indirect_ref (resdecl);
1544 else if (!is_gimple_reg_type (restype))
1545 {
1546 restmp = resdecl;
1547 add_local_decl (cfun, restmp);
1548 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1549 }
1550 else
1551 restmp = create_tmp_reg (restype, "retval");
1552 }
1553
1554 for (arg = a; arg; arg = DECL_CHAIN (arg))
1555 nargs++;
1556 auto_vec<tree> vargs (nargs);
1557 if (this_adjusting)
1558 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1559 virtual_offset));
1560 else if (nargs)
1561 vargs.quick_push (a);
1562
1563 if (nargs)
1564 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1565 {
1566 tree tmp = arg;
1567 if (!is_gimple_val (arg))
1568 {
1569 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1570 (TREE_TYPE (arg)), "arg");
1571 gimple stmt = gimple_build_assign (tmp, arg);
1572 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1573 }
1574 vargs.quick_push (tmp);
1575 }
1576 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1577 callees->call_stmt = call;
1578 gimple_call_set_from_thunk (call, true);
1579 if (restmp)
1580 {
1581 gimple_call_set_lhs (call, restmp);
1582 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1583 TREE_TYPE (TREE_TYPE (alias))));
1584 }
1585 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1586 if (!(gimple_call_flags (call) & ECF_NORETURN))
1587 {
1588 if (restmp && !this_adjusting
1589 && (fixed_offset || virtual_offset))
1590 {
1591 tree true_label = NULL_TREE;
1592
1593 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1594 {
1595 gimple stmt;
1596 /* If the return type is a pointer, we need to
1597 protect against NULL. We know there will be an
1598 adjustment, because that's why we're emitting a
1599 thunk. */
1600 then_bb = create_basic_block (NULL, (void *) 0, bb);
1601 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1602 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1603 add_bb_to_loop (then_bb, bb->loop_father);
1604 add_bb_to_loop (return_bb, bb->loop_father);
1605 add_bb_to_loop (else_bb, bb->loop_father);
1606 remove_edge (single_succ_edge (bb));
1607 true_label = gimple_block_label (then_bb);
1608 stmt = gimple_build_cond (NE_EXPR, restmp,
1609 build_zero_cst (TREE_TYPE (restmp)),
1610 NULL_TREE, NULL_TREE);
1611 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1612 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1613 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1614 make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1615 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1616 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1617 bsi = gsi_last_bb (then_bb);
1618 }
1619
1620 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1621 fixed_offset, virtual_offset);
1622 if (true_label)
1623 {
1624 gimple stmt;
1625 bsi = gsi_last_bb (else_bb);
1626 stmt = gimple_build_assign (restmp,
1627 build_zero_cst (TREE_TYPE (restmp)));
1628 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1629 bsi = gsi_last_bb (return_bb);
1630 }
1631 }
1632 else
1633 gimple_call_set_tail (call, true);
1634
1635 /* Build return value. */
1636 ret = gimple_build_return (restmp);
1637 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1638 }
1639 else
1640 {
1641 gimple_call_set_tail (call, true);
1642 remove_edge (single_succ_edge (bb));
1643 }
1644
1645 cfun->gimple_df->in_ssa_p = true;
1646 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
1647 TREE_ASM_WRITTEN (thunk_fndecl) = false;
1648 delete_unreachable_blocks ();
1649 update_ssa (TODO_update_ssa);
1650 #ifdef ENABLE_CHECKING
1651 verify_flow_info ();
1652 #endif
1653 free_dominance_info (CDI_DOMINATORS);
1654
1655 /* Since we want to emit the thunk, we explicitly mark its name as
1656 referenced. */
1657 thunk.thunk_p = false;
1658 lowered = true;
1659 bitmap_obstack_release (NULL);
1660 }
1661 current_function_decl = NULL;
1662 set_cfun (NULL);
1663 return true;
1664 }
1665
1666 /* Assemble thunks and aliases associated with the node. */
1667
1668 void
1669 cgraph_node::assemble_thunks_and_aliases (void)
1670 {
1671 cgraph_edge *e;
1672 ipa_ref *ref;
1673
1674 for (e = callers; e;)
1675 if (e->caller->thunk.thunk_p)
1676 {
1677 cgraph_node *thunk = e->caller;
1678
1679 e = e->next_caller;
1680 thunk->expand_thunk (true, false);
1681 thunk->assemble_thunks_and_aliases ();
1682 }
1683 else
1684 e = e->next_caller;
1685
1686 FOR_EACH_ALIAS (this, ref)
1687 {
1688 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1689 bool saved_written = TREE_ASM_WRITTEN (decl);
1690
1691 /* Force assemble_alias to really output the alias this time instead
1692 of buffering it in the alias pairs list. */
1693 TREE_ASM_WRITTEN (decl) = 1;
1694 do_assemble_alias (alias->decl,
1695 DECL_ASSEMBLER_NAME (decl));
1696 alias->assemble_thunks_and_aliases ();
1697 TREE_ASM_WRITTEN (decl) = saved_written;
1698 }
1699 }
1700
1701 /* Expand function specified by node. */
1702
1703 void
1704 cgraph_node::expand (void)
1705 {
1706 location_t saved_loc;
1707
1708 /* We ought to not compile any inline clones. */
1709 gcc_assert (!global.inlined_to);
1710
1711 announce_function (decl);
1712 process = 0;
1713 gcc_assert (lowered);
1714 get_body ();
1715
1716 /* Generate RTL for the body of DECL. */
1717
1718 timevar_push (TV_REST_OF_COMPILATION);
1719
1720 gcc_assert (symtab->global_info_ready);
1721
1722 /* Initialize the default bitmap obstack. */
1723 bitmap_obstack_initialize (NULL);
1724
1725 /* Initialize the RTL code for the function. */
1726 current_function_decl = decl;
1727 saved_loc = input_location;
1728 input_location = DECL_SOURCE_LOCATION (decl);
1729 init_function_start (decl);
1730
1731 gimple_register_cfg_hooks ();
1732
1733 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1734
1735 execute_all_ipa_transforms ();
1736
1737 /* Perform all tree transforms and optimizations. */
1738
1739 /* Signal the start of passes. */
1740 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1741
1742 execute_pass_list (cfun, g->get_passes ()->all_passes);
1743
1744 /* Signal the end of passes. */
1745 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1746
1747 bitmap_obstack_release (&reg_obstack);
1748
1749 /* Release the default bitmap obstack. */
1750 bitmap_obstack_release (NULL);
1751
1752 /* If requested, warn about function definitions where the function will
1753 return a value (usually of some struct or union type) which itself will
1754 take up a lot of stack space. */
1755 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1756 {
1757 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1758
1759 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1760 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1761 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1762 larger_than_size))
1763 {
1764 unsigned int size_as_int
1765 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1766
1767 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1768 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1769 decl, size_as_int);
1770 else
1771 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1772 decl, larger_than_size);
1773 }
1774 }
1775
1776 gimple_set_body (decl, NULL);
1777 if (DECL_STRUCT_FUNCTION (decl) == 0
1778 && !cgraph_node::get (decl)->origin)
1779 {
1780 /* Stop pointing to the local nodes about to be freed.
1781 But DECL_INITIAL must remain nonzero so we know this
1782 was an actual function definition.
1783 For a nested function, this is done in c_pop_function_context.
1784 If rest_of_compilation set this to 0, leave it 0. */
1785 if (DECL_INITIAL (decl) != 0)
1786 DECL_INITIAL (decl) = error_mark_node;
1787 }
1788
1789 input_location = saved_loc;
1790
1791 ggc_collect ();
1792 timevar_pop (TV_REST_OF_COMPILATION);
1793
1794 /* Make sure that BE didn't give up on compiling. */
1795 gcc_assert (TREE_ASM_WRITTEN (decl));
1796 set_cfun (NULL);
1797 current_function_decl = NULL;
1798
1799 /* It would make a lot more sense to output thunks before the function body
1800    to get more forward and fewer backward jumps.  This, however, would require
1801    solving a problem with comdats.  See PR48668.  Also, aliases must come after
1802    the function itself to keep one-pass assemblers, like the one on AIX, happy.
1803    See PR 50689.  FIXME: Perhaps thunks should be moved before the function
1804    iff they are not in comdat groups.  */
1805 assemble_thunks_and_aliases ();
1806 release_body ();
1807 /* Eliminate all call edges.  This is important so that GIMPLE_CALL statements
1808    no longer point to the dead function body.  */
1809 remove_callees ();
1810 remove_all_references ();
1811 }
1812
1813 /* Node comparator responsible for producing the order that corresponds
1814    to the time at which each function was first executed.  */
1815
1816 static int
1817 node_cmp (const void *pa, const void *pb)
1818 {
1819 const cgraph_node *a = *(const cgraph_node * const *) pa;
1820 const cgraph_node *b = *(const cgraph_node * const *) pb;
1821
1822 /* Functions with a time profile must come before those without one.  */
1823 if (!a->tp_first_run || !b->tp_first_run)
1824 return a->tp_first_run - b->tp_first_run;
1825
1826 return a->tp_first_run != b->tp_first_run
1827 ? b->tp_first_run - a->tp_first_run
1828 : b->order - a->order;
1829 }
1830
1831 /* Expand all functions that must be output.
1832
1833 Attempt to topologically sort the nodes so that a function is output
1834 only after all of its callees have been assembled, allowing data to be
1835 propagated across the callgraph. Use a stack to get smaller distance
1836 between a function and its callees (later we may choose to use a more
1837 sophisticated algorithm for function reordering; we will likely want
1838 to use subsections to make the output functions appear in top-down
1839 order). */
1840
1841 static void
1842 expand_all_functions (void)
1843 {
1844 cgraph_node *node;
1845 cgraph_node **order = XCNEWVEC (cgraph_node *,
1846 symtab->cgraph_count);
1847 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1848 int order_pos, new_order_pos = 0;
1849 int i;
1850
1851 order_pos = ipa_reverse_postorder (order);
1852 gcc_assert (order_pos == symtab->cgraph_count);
1853
1854 /* The garbage collector may remove inline clones we eliminated during
1855    optimization, so we must be sure not to reference them.  */
1856 for (i = 0; i < order_pos; i++)
1857 if (order[i]->process)
1858 order[new_order_pos++] = order[i];
1859
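  /* With -fprofile-reorder-functions, sort by time profile; since the loop
     below walks the array backwards, functions first executed early at run
     time end up emitted first, and functions without a profile come last.  */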
1860 if (flag_profile_reorder_functions)
1861 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
1862
1863 for (i = new_order_pos - 1; i >= 0; i--)
1864 {
1865 node = order[i];
1866
1867 if (node->process)
1868 {
1869 expanded_func_count++;
1870 	  if (node->tp_first_run)
1871 profiled_func_count++;
1872
1873 if (symtab->dump_file)
1874 fprintf (symtab->dump_file,
1875 "Time profile order in expand_all_functions:%s:%d\n",
1876 node->asm_name (), node->tp_first_run);
1877 node->process = 0;
1878 node->expand ();
1879 }
1880 }
1881
1882 if (dump_file)
1883 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1884 main_input_filename, profiled_func_count, expanded_func_count);
1885
1886 if (symtab->dump_file && flag_profile_reorder_functions)
1887 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
1888 profiled_func_count, expanded_func_count);
1889
1890 symtab->process_new_functions ();
1891 free_gimplify_stack ();
1892
1893 free (order);
1894 }
1895
1896 /* This is used to sort the node types by the cgraph order number. */
1897
1898 enum cgraph_order_sort_kind
1899 {
1900 ORDER_UNDEFINED = 0,
1901 ORDER_FUNCTION,
1902 ORDER_VAR,
1903 ORDER_ASM
1904 };
1905
1906 struct cgraph_order_sort
1907 {
1908 enum cgraph_order_sort_kind kind;
1909 union
1910 {
1911 cgraph_node *f;
1912 varpool_node *v;
1913 asm_node *a;
1914 } u;
1915 };
1916
1917 /* Output all functions, variables, and asm statements in the order
1918    given by their order fields, which is the order in which they
1919    appeared in the file.  This implements -fno-toplevel-reorder.  In
1920    this mode we may output functions and variables which don't really
1921    need to be output.
1922    When NO_REORDER is true, only do this for symbols marked no_reorder.  */
1923
1924 static void
1925 output_in_order (bool no_reorder)
1926 {
1927 int max;
1928 cgraph_order_sort *nodes;
1929 int i;
1930 cgraph_node *pf;
1931 varpool_node *pv;
1932 asm_node *pa;
1933 max = symtab->order;
1934 nodes = XCNEWVEC (cgraph_order_sort, max);
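  /* Entries are indexed by each symbol's order number, so the sequential
     walk over the array below reproduces the original source order.  */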
1935
1936 FOR_EACH_DEFINED_FUNCTION (pf)
1937 {
1938 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1939 {
1940 if (no_reorder && !pf->no_reorder)
1941 continue;
1942 i = pf->order;
1943 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1944 nodes[i].kind = ORDER_FUNCTION;
1945 nodes[i].u.f = pf;
1946 }
1947 }
1948
1949 FOR_EACH_DEFINED_VARIABLE (pv)
1950 if (!DECL_EXTERNAL (pv->decl))
1951 {
1952 if (no_reorder && !pv->no_reorder)
1953 continue;
1954 i = pv->order;
1955 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1956 nodes[i].kind = ORDER_VAR;
1957 nodes[i].u.v = pv;
1958 }
1959
1960 for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
1961 {
1962 i = pa->order;
1963 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1964 nodes[i].kind = ORDER_ASM;
1965 nodes[i].u.a = pa;
1966 }
1967
1968 /* In toplevel reorder mode we output all statics; mark them as needed. */
1969
1970 for (i = 0; i < max; ++i)
1971 if (nodes[i].kind == ORDER_VAR)
1972 nodes[i].u.v->finalize_named_section_flags ();
1973
1974 for (i = 0; i < max; ++i)
1975 {
1976 switch (nodes[i].kind)
1977 {
1978 case ORDER_FUNCTION:
1979 nodes[i].u.f->process = 0;
1980 nodes[i].u.f->expand ();
1981 break;
1982
1983 case ORDER_VAR:
1984 nodes[i].u.v->assemble_decl ();
1985 break;
1986
1987 case ORDER_ASM:
1988 assemble_asm (nodes[i].u.a->asm_str);
1989 break;
1990
1991 case ORDER_UNDEFINED:
1992 break;
1993
1994 default:
1995 gcc_unreachable ();
1996 }
1997 }
1998
1999 symtab->clear_asm_symbols ();
2000
2001 free (nodes);
2002 }
2003
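/* Run the interprocedural passes: the small IPA passes, the analysis
   (summary) stage of the regular IPA passes, and, unless we are only
   streaming LTO bytecode, the regular IPA passes themselves.  */
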
2004 static void
2005 ipa_passes (void)
2006 {
2007 gcc::pass_manager *passes = g->get_passes ();
2008
2009 set_cfun (NULL);
2010 current_function_decl = NULL;
2011 gimple_register_cfg_hooks ();
2012 bitmap_obstack_initialize (NULL);
2013
2014 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2015
2016 if (!in_lto_p)
2017 {
2018 execute_ipa_pass_list (passes->all_small_ipa_passes);
2019 if (seen_error ())
2020 return;
2021 }
2022
2023 /* This extra symtab_remove_unreachable_nodes pass tends to catch code made
2024    unreachable by devirtualization and other changes where removal must iterate.  */
2025 symtab->remove_unreachable_nodes (true, symtab->dump_file);
2026
2027 /* If pass_all_early_optimizations was not scheduled, the state of
2028 the cgraph will not be properly updated. Update it now. */
2029 if (symtab->state < IPA_SSA)
2030 symtab->state = IPA_SSA;
2031
2032 if (!in_lto_p)
2033 {
2034 /* Generate coverage variables and constructors. */
2035 coverage_finish ();
2036
2037 /* Process new functions added. */
2038 set_cfun (NULL);
2039 current_function_decl = NULL;
2040 symtab->process_new_functions ();
2041
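      /* Run only the analysis (summary generation) stage of the regular IPA
	 passes here; their execution stage runs later, possibly at link time.  */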
2042 execute_ipa_summary_passes
2043 ((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2044 }
2045
2046 /* Some targets need to handle LTO assembler output specially. */
2047 if (flag_generate_lto)
2048 targetm.asm_out.lto_start ();
2049
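  /* When LTO output is requested, stream the summary information into
     sections of the assembler output so it can be read back at link time.  */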
2050 if (!in_lto_p)
2051 ipa_write_summaries ();
2052
2053 if (flag_generate_lto)
2054 targetm.asm_out.lto_end ();
2055
2056 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2057 execute_ipa_pass_list (passes->all_regular_ipa_passes);
2058 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2059
2060 bitmap_obstack_release (NULL);
2061 }
2062
2063
2064 /* Return the name of the symbol that DECL's "alias" attribute points to.  */
2065
2066 static tree
2067 get_alias_symbol (tree decl)
2068 {
2069 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2070 return get_identifier (TREE_STRING_POINTER
2071 (TREE_VALUE (TREE_VALUE (alias))));
2072 }
2073
2074
2075 /* Weakrefs may be associated with external decls and thus not output
2076    at expansion time.  Emit all necessary aliases.  */
2077
2078 void
2079 symbol_table::output_weakrefs (void)
2080 {
2081 symtab_node *node;
2082 FOR_EACH_SYMBOL (node)
2083 if (node->alias
2084 && !TREE_ASM_WRITTEN (node->decl)
2085 && node->weakref)
2086 {
2087 tree target;
2088
2089 /* Weakrefs are special in that they do not require the target to be
2090 	 defined in the current compilation unit, so it is a bit harder to
2091 	 work out what we want to alias.
2092 	 When the alias target is defined, we fetch it from the symtab reference;
2093 	 otherwise it is pointed to by alias_target.  */
2094 if (node->alias_target)
2095 target = (DECL_P (node->alias_target)
2096 ? DECL_ASSEMBLER_NAME (node->alias_target)
2097 : node->alias_target);
2098 else if (node->analyzed)
2099 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2100 else
2101 {
2102 gcc_unreachable ();
2103 target = get_alias_symbol (node->decl);
2104 }
2105 do_assemble_alias (node->decl, target);
2106 }
2107 }
2108
2109 /* Perform simple optimizations based on callgraph. */
2110
2111 void
2112 symbol_table::compile (void)
2113 {
2114 if (seen_error ())
2115 return;
2116
2117 #ifdef ENABLE_CHECKING
2118 symtab_node::verify_symtab_nodes ();
2119 #endif
2120
2121 timevar_push (TV_CGRAPHOPT);
2122 if (pre_ipa_mem_report)
2123 {
2124 fprintf (stderr, "Memory consumption before IPA\n");
2125 dump_memory_report (false);
2126 }
2127 if (!quiet_flag)
2128 fprintf (stderr, "Performing interprocedural optimizations\n");
2129 state = IPA;
2130
2131 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2132 if (flag_lto)
2133 lto_streamer_hooks_init ();
2134
2135 /* Don't run the IPA passes if there were any errors or sorry messages.  */
2136 if (!seen_error ())
2137 ipa_passes ();
2138
2139 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2140 if (seen_error ()
2141 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2142 {
2143 timevar_pop (TV_CGRAPHOPT);
2144 return;
2145 }
2146
2147 /* This pass removes bodies of extern inline functions we never inlined.
2148    Do this late so other IPA passes see what is really going on.
2149    FIXME: This should be run just after inlining by the pass manager.  */
2150 remove_unreachable_nodes (false, dump_file);
2151 global_info_ready = true;
2152 if (dump_file)
2153 {
2154 fprintf (dump_file, "Optimized ");
2155 symtab_node::dump_table (dump_file);
2156 }
2157 if (post_ipa_mem_report)
2158 {
2159 fprintf (stderr, "Memory consumption after IPA\n");
2160 dump_memory_report (false);
2161 }
2162 timevar_pop (TV_CGRAPHOPT);
2163
2164 /* Output everything. */
2165 (*debug_hooks->assembly_start) ();
2166 if (!quiet_flag)
2167 fprintf (stderr, "Assembling functions:\n");
2168 #ifdef ENABLE_CHECKING
2169 symtab_node::verify_symtab_nodes ();
2170 #endif
2171
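  /* Give the virtual clones created by the IPA passes (such as IPA-CP) their
     own function bodies before the late IPA passes run and functions are
     expanded.  */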
2172 materialize_all_clones ();
2173 bitmap_obstack_initialize (NULL);
2174 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2175 bitmap_obstack_release (NULL);
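  /* Set the process flag on every function that still has to be assembled;
     output_in_order and expand_all_functions below test and clear it.  */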
2176 mark_functions_to_output ();
2177
2178 /* When weakref support is missing, we automatically translate all
2179    references to NODE to references to its ultimate alias target.
2180    The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2181 TREE_CHAIN.
2182
2183 Set up this mapping before we output any assembler but once we are sure
2184 that all symbol renaming is done.
2185
2186    FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2187    level by physically rewriting the IL.  At the moment we can only redirect
2188    calls, so we need infrastructure for renaming references as well.  */
2189 #ifndef ASM_OUTPUT_WEAKREF
2190 symtab_node *node;
2191
2192 FOR_EACH_SYMBOL (node)
2193 if (node->alias
2194 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2195 {
2196 IDENTIFIER_TRANSPARENT_ALIAS
2197 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2198 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2199 = (node->alias_target ? node->alias_target
2200 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2201 }
2202 #endif
2203
2204 state = EXPANSION;
2205
2206 if (!flag_toplevel_reorder)
2207 output_in_order (false);
2208 else
2209 {
2210 /* First output asm statements and anything marked no_reorder.  The
2211 	 process flag is cleared for these nodes, so we skip them later.  */
2212 output_in_order (true);
2213 expand_all_functions ();
2214 output_variables ();
2215 }
2216
2217 process_new_functions ();
2218 state = FINISHED;
2219 output_weakrefs ();
2220
2221 if (dump_file)
2222 {
2223 fprintf (dump_file, "\nFinal ");
2224 symtab_node::dump_table (dump_file);
2225 }
2226 #ifdef ENABLE_CHECKING
2227 symtab_node::verify_symtab_nodes ();
2228 /* Double check that all inline clones are gone and that all
2229 function bodies have been released from memory. */
2230 if (!seen_error ())
2231 {
2232 cgraph_node *node;
2233 bool error_found = false;
2234
2235 FOR_EACH_DEFINED_FUNCTION (node)
2236 if (node->global.inlined_to
2237 || gimple_has_body_p (node->decl))
2238 {
2239 error_found = true;
2240 node->debug ();
2241 }
2242 if (error_found)
2243 internal_error ("nodes with unreleased memory found");
2244 }
2245 #endif
2246 }
2247
2248
2249 /* Analyze the whole compilation unit once it is parsed completely. */
2250
2251 void
2252 symbol_table::finalize_compilation_unit (void)
2253 {
2254 timevar_push (TV_CGRAPH);
2255
2256 /* If we're here, there is no current function anymore.  Some frontends
2257    are lazy about clearing this.  */
2258 current_function_decl = NULL;
2259 set_cfun (NULL);
2260
2261 /* Do not skip analyzing the functions if there were errors; otherwise
2262    we would miss diagnostics for the following functions.  */
2263
2264 /* Emit size functions we didn't inline. */
2265 finalize_size_functions ();
2266
2267 /* Mark alias targets necessary and emit diagnostics. */
2268 handle_alias_pairs ();
2269
2270 if (!quiet_flag)
2271 {
2272 fprintf (stderr, "\nAnalyzing compilation unit\n");
2273 fflush (stderr);
2274 }
2275
2276 if (flag_dump_passes)
2277 dump_passes ();
2278
2279 /* Gimplify and lower all functions, compute reachability and
2280 remove unreachable nodes. */
2281 analyze_functions ();
2282
2283 /* Mark alias targets necessary and emit diagnostics. */
2284 handle_alias_pairs ();
2285
2286 /* Gimplify and lower thunks. */
2287 analyze_functions ();
2288
2289 /* Finally drive the pass manager. */
2290 compile ();
2291
2292 timevar_pop (TV_CGRAPH);
2293 }
2294
2295 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2296 within the same process. For use by toplev::finalize. */
2297
2298 void
2299 cgraphunit_c_finalize (void)
2300 {
2301 gcc_assert (cgraph_new_nodes.length () == 0);
2302 cgraph_new_nodes.truncate (0);
2303
2304 vtable_entry_type = NULL;
2305 queued_nodes = &symtab_terminator;
2306
2307 first_analyzed = NULL;
2308 first_analyzed_var = NULL;
2309 }
2310
2311 /* Create a wrapper from this cgraph_node to the TARGET node.  A thunk is
2312    used for this kind of wrapper method.  */
2313
2314 void
2315 cgraph_node::create_wrapper (cgraph_node *target)
2316 {
2317 /* Preserve DECL_RESULT so we get the right return-by-reference flag.  */
2318 tree decl_result = DECL_RESULT (decl);
2319
2320 /* Remove the function's body but keep its arguments so they can be
2321    reused by the thunk.  */
2322 release_body (true);
2323 reset ();
2324
2325 DECL_RESULT (decl) = decl_result;
2326 DECL_INITIAL (decl) = NULL;
2327 allocate_struct_function (decl, false);
2328 set_cfun (NULL);
2329
2330 /* Turn the alias into a thunk and expand it into GIMPLE representation.  */
2331 definition = true;
2332 thunk.thunk_p = true;
2333 thunk.this_adjusting = false;
2334
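  /* The new call edge represents the call to TARGET that the thunk body
     generated below will make.  */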
2335 cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2336
2337 expand_thunk (false, true);
2338 e->call_stmt_cannot_inline_p = true;
2339
2340 /* Inline summary set-up. */
2341 analyze ();
2342 inline_analyze_function (this);
2343 }
2344
2345 #include "gt-cgraphunit.h"