ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn.
[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30	 This function is called once front-end has parsed whole body of function
31	 and it is certain that neither the function body nor the declaration will change.
32
33	 (There is one exception needed for implementing GCC extern inline
34	 functions.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66	 indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78	 The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86	 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98	 d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110	 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115	 separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121	 IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141	 Now it is known which variable references were not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151	 The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "predict.h"
171 #include "vec.h"
172 #include "hashtab.h"
173 #include "hash-set.h"
174 #include "machmode.h"
175 #include "hard-reg-set.h"
176 #include "input.h"
177 #include "function.h"
178 #include "basic-block.h"
179 #include "tree-ssa-alias.h"
180 #include "internal-fn.h"
181 #include "gimple-fold.h"
182 #include "gimple-expr.h"
183 #include "is-a.h"
184 #include "gimple.h"
185 #include "gimplify.h"
186 #include "gimple-iterator.h"
187 #include "gimplify-me.h"
188 #include "gimple-ssa.h"
189 #include "tree-cfg.h"
190 #include "tree-into-ssa.h"
191 #include "tree-ssa.h"
192 #include "tree-inline.h"
193 #include "langhooks.h"
194 #include "toplev.h"
195 #include "flags.h"
196 #include "debug.h"
197 #include "target.h"
198 #include "diagnostic.h"
199 #include "params.h"
200 #include "intl.h"
201 #include "hash-map.h"
202 #include "plugin-api.h"
203 #include "ipa-ref.h"
204 #include "cgraph.h"
205 #include "alloc-pool.h"
206 #include "ipa-prop.h"
207 #include "tree-iterator.h"
208 #include "tree-pass.h"
209 #include "tree-dump.h"
210 #include "gimple-pretty-print.h"
211 #include "output.h"
212 #include "coverage.h"
213 #include "plugin.h"
214 #include "ipa-inline.h"
215 #include "ipa-utils.h"
216 #include "lto-streamer.h"
217 #include "except.h"
218 #include "cfgloop.h"
219 #include "regset.h" /* FIXME: For reg_obstack. */
220 #include "context.h"
221 #include "pass_manager.h"
222 #include "tree-nested.h"
223 #include "gimplify.h"
224 #include "dbgcnt.h"
225 #include "tree-chkp.h"
226 #include "lto-section-names.h"
227 #include "omp-low.h"
228
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations of driver routines defined later in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  GTY-marked so the garbage
   collector keeps the cached type alive across collections.  */
static GTY (()) tree vtable_entry_type;
240
241 /* Determine if symbol declaration is needed. That is, visible to something
242 either outside this translation unit, something magic in the system
243 configury */
244 bool
245 symtab_node::needed_p (void)
246 {
247 /* Double check that no one output the function into assembly file
248 early. */
249 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
250 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
251
252 if (!definition)
253 return false;
254
255 if (DECL_EXTERNAL (decl))
256 return false;
257
258 /* If the user told us it is used, then it must be so. */
259 if (force_output)
260 return true;
261
262 /* ABI forced symbols are needed when they are external. */
263 if (forced_by_abi && TREE_PUBLIC (decl))
264 return true;
265
266 /* Keep constructors, destructors and virtual functions. */
267 if (TREE_CODE (decl) == FUNCTION_DECL
268 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
269 return true;
270
271 /* Externally visible variables must be output. The exception is
272 COMDAT variables that must be output only when they are needed. */
273 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
274 return true;
275
276 return false;
277 }
278
/* Head and terminator of the queue of nodes to be processed while building
   callgraph.  The queue is threaded through symtab_node::aux; a dedicated
   terminator object (rather than NULL) lets enqueue_node distinguish
   "not queued" (aux == NULL) from "queued last" (aux == &symtab_terminator).  */

static symtab_node symtab_terminator;
static symtab_node *queued_nodes = &symtab_terminator;
284
285 /* Add NODE to queue starting at QUEUED_NODES.
286 The queue is linked via AUX pointers and terminated by pointer to 1. */
287
288 static void
289 enqueue_node (symtab_node *node)
290 {
291 if (node->aux)
292 return;
293 gcc_checking_assert (queued_nodes);
294 node->aux = queued_nodes;
295 queued_nodes = node;
296 }
297
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  How much
   work is needed per node depends on how far compilation had progressed
   (symbol_table state) when the function was created.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones; hence length () is re-read on every
     iteration rather than cached.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA form if the rest of the unit already
	     is; otherwise just refresh the inliner's summary for it.  */
	  if (state == IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  cgraph_new_nodes.release ();
}
365
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  /* Drop only outgoing edges and references; incoming callers are left
     in place.  */
  remove_callees ();
  remove_all_references ();
}
399
400 /* Return true when there are references to the node. */
401
402 bool
403 symtab_node::referred_to_p (void)
404 {
405 ipa_ref *ref = NULL;
406
407 /* See if there are any references at all. */
408 if (iterate_referring (0, ref))
409 return true;
410 /* For functions check also calls. */
411 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
412 if (cn && cn->callers)
413 return true;
414 return false;
415 }
416
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* This is a redefinition (extern inline extension); throw away the
	 previous analysis and remember that the body was replaced.  */
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* A body that already has a CFG arrived pre-lowered (e.g. from the
     middle end).  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!opt_for_fn (decl, optimize)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the callgraph is being built, put trivially needed or already
     referenced symbols on the analysis worklist right away.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
477
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid the need for the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;
  switch (symtab->state)
    {
      case PARSING:
	cgraph_node::finalize_function (fndecl, false);
	break;
      case CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_node::get_create (fndecl);
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case IPA:
      case IPA_SSA:
      case EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_node::get_create (fndecl);
	node->local.local = false;
	node->definition = true;
	node->force_output = true;
	/* During expansion the lowering passes no longer run as part of the
	   normal pipeline, so run them here by hand if needed.  */
	if (!lowered && symtab->state == EXPANSION)
	  {
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    gimple_register_cfg_hooks ();
	    bitmap_obstack_initialize (NULL);
	    execute_pass_list (cfun, passes->all_lowering_passes);
	    passes->execute_early_local_passes ();
	    bitmap_obstack_release (NULL);
	    pop_cfun ();

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_node::create (fndecl);
	if (lowered)
	  node->lowered = true;
	node->definition = true;
	node->analyze ();
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  g->get_passes ()->execute_early_local_passes ();
	bitmap_obstack_release (NULL);
	pop_cfun ();
	node->expand ();
	break;

      default:
	gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
562
/* Analyze the function scheduled to be output.  Thunks, aliases and
   multi-versioned dispatchers are expanded or resolved specially; ordinary
   functions are gimplified and run through the lowering passes.  */
void
cgraph_node::analyze (void)
{
  tree decl = this->decl;
  location_t saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);

  if (thunk.thunk_p)
    {
      create_edge (cgraph_node::get (thunk.alias),
		   NULL, 0, CGRAPH_FREQ_BASE);
      /* expand_thunk may fail; in either case the alias is cleared and
	 on failure the node stays unanalyzed.  */
      if (!expand_thunk (false, false))
	{
	  thunk.alias = NULL;
	  return;
	}
      thunk.alias = NULL;
    }
  if (alias)
    resolve_alias (cgraph_node::get (alias_target));
  else if (dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      cgraph_function_version_info *dispatcher_version_info
	= function_version ();
      if (dispatcher_version_info != NULL
          && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (this);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!lowered)
	{
	  /* Nested functions must be gimplified and split out before the
	     outer function can be lowered.  */
	  if (nested)
	    lower_nested_functions (decl);
	  gcc_assert (!nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  lowered = true;
	}

      pop_cfun ();
    }
  analyzed = true;

  input_location = saved_loc;
}
636
637 /* C++ frontend produce same body aliases all over the place, even before PCH
638 gets streamed out. It relies on us linking the aliases with their function
639 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
640 first produce aliases without links, but once C++ FE is sure he won't sream
641 PCH we build the links via this function. */
642
643 void
644 symbol_table::process_same_body_aliases (void)
645 {
646 symtab_node *node;
647 FOR_EACH_SYMBOL (node)
648 if (node->cpp_implicit_alias && !node->analyzed)
649 node->resolve_alias
650 (TREE_CODE (node->alias_target) == VAR_DECL
651 ? (symtab_node *)varpool_node::get_create (node->alias_target)
652 : (symtab_node *)cgraph_node::get_create (node->alias_target));
653 cpp_implicit_aliases_done = true;
654 }
655
656 /* Process attributes common for vars and functions. */
657
658 static void
659 process_common_attributes (symtab_node *node, tree decl)
660 {
661 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
662
663 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
664 {
665 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
666 "%<weakref%> attribute should be accompanied with"
667 " an %<alias%> attribute");
668 DECL_WEAK (decl) = 0;
669 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
670 DECL_ATTRIBUTES (decl));
671 }
672
673 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
674 node->no_reorder = 1;
675 }
676
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  Only symbols added since FIRST / FIRST_VAR are
   visited, so repeated invocations do not reprocess old nodes.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* weakref on a symbol that also has a body is contradictory; drop
	 the attribute and the weak flag.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external variable carrying an initializer is effectively a
	 definition; finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
771
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
          && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	&& symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Let the Pointer Bounds Checker instrumentation see the initializer
     (see tree-chkp).  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
811
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	 (edge, &final, &cache_token);

  /* add returns false on first insertion, so this body runs once per
     distinct target list.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (method_class_type
		     (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* An empty target list means the call is provably unreachable;
	     redirect it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
899
900
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.  */

/* Watermarks recording where the previous invocation of analyze_functions
   stopped, so repeated calls (intermodule optimization) only visit newly
   added symbols.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
905
906 static void
907 analyze_functions (void)
908 {
909 /* Keep track of already processed nodes when called multiple times for
910 intermodule optimization. */
911 cgraph_node *first_handled = first_analyzed;
912 varpool_node *first_handled_var = first_analyzed_var;
913 hash_set<void *> reachable_call_targets;
914
915 symtab_node *node;
916 symtab_node *next;
917 int i;
918 ipa_ref *ref;
919 bool changed = true;
920 location_t saved_loc = input_location;
921
922 bitmap_obstack_initialize (NULL);
923 symtab->state = CONSTRUCTION;
924 input_location = UNKNOWN_LOCATION;
925
926 /* Ugly, but the fixup can not happen at a time same body alias is created;
927 C++ FE is confused about the COMDAT groups being right. */
928 if (symtab->cpp_implicit_aliases_done)
929 FOR_EACH_SYMBOL (node)
930 if (node->cpp_implicit_alias)
931 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
932 build_type_inheritance_graph ();
933
934 /* Analysis adds static variables that in turn adds references to new functions.
935 So we need to iterate the process until it stabilize. */
936 while (changed)
937 {
938 changed = false;
939 process_function_and_variable_attributes (first_analyzed,
940 first_analyzed_var);
941
942 /* First identify the trivially needed symbols. */
943 for (node = symtab->first_symbol ();
944 node != first_analyzed
945 && node != first_analyzed_var; node = node->next)
946 {
947 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
948 node->get_comdat_group_id ();
949 if (node->needed_p ())
950 {
951 enqueue_node (node);
952 if (!changed && symtab->dump_file)
953 fprintf (symtab->dump_file, "Trivially needed symbols:");
954 changed = true;
955 if (symtab->dump_file)
956 fprintf (symtab->dump_file, " %s", node->asm_name ());
957 if (!changed && symtab->dump_file)
958 fprintf (symtab->dump_file, "\n");
959 }
960 if (node == first_analyzed
961 || node == first_analyzed_var)
962 break;
963 }
964 symtab->process_new_functions ();
965 first_analyzed_var = symtab->first_variable ();
966 first_analyzed = symtab->first_function ();
967
968 if (changed && symtab->dump_file)
969 fprintf (symtab->dump_file, "\n");
970
971 /* Lower representation, build callgraph edges and references for all trivially
972 needed symbols and all symbols referred by them. */
973 while (queued_nodes != &symtab_terminator)
974 {
975 changed = true;
976 node = queued_nodes;
977 queued_nodes = (symtab_node *)queued_nodes->aux;
978 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
979 if (cnode && cnode->definition)
980 {
981 cgraph_edge *edge;
982 tree decl = cnode->decl;
983
984 /* ??? It is possible to create extern inline function
985 and later using weak alias attribute to kill its body.
986 See gcc.c-torture/compile/20011119-1.c */
987 if (!DECL_STRUCT_FUNCTION (decl)
988 && !cnode->alias
989 && !cnode->thunk.thunk_p
990 && !cnode->dispatcher_function)
991 {
992 cnode->reset ();
993 cnode->local.redefined_extern_inline = true;
994 continue;
995 }
996
997 if (!cnode->analyzed)
998 cnode->analyze ();
999
1000 for (edge = cnode->callees; edge; edge = edge->next_callee)
1001 if (edge->callee->definition)
1002 enqueue_node (edge->callee);
1003 if (opt_for_fn (cnode->decl, optimize)
1004 && opt_for_fn (cnode->decl, flag_devirtualize))
1005 {
1006 cgraph_edge *next;
1007
1008 for (edge = cnode->indirect_calls; edge; edge = next)
1009 {
1010 next = edge->next_callee;
1011 if (edge->indirect_info->polymorphic)
1012 walk_polymorphic_call_targets (&reachable_call_targets,
1013 edge);
1014 }
1015 }
1016
1017 /* If decl is a clone of an abstract function,
1018 mark that abstract function so that we don't release its body.
1019 The DECL_INITIAL() of that abstract function declaration
1020 will be later needed to output debug info. */
1021 if (DECL_ABSTRACT_ORIGIN (decl))
1022 {
1023 cgraph_node *origin_node
1024 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1025 origin_node->used_as_abstract_origin = true;
1026 }
1027 }
1028 else
1029 {
1030 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1031 if (vnode && vnode->definition && !vnode->analyzed)
1032 vnode->analyze ();
1033 }
1034
1035 if (node->same_comdat_group)
1036 {
1037 symtab_node *next;
1038 for (next = node->same_comdat_group;
1039 next != node;
1040 next = next->same_comdat_group)
1041 enqueue_node (next);
1042 }
1043 for (i = 0; node->iterate_reference (i, ref); i++)
1044 if (ref->referred->definition)
1045 enqueue_node (ref->referred);
1046 symtab->process_new_functions ();
1047 }
1048 }
1049 update_type_inheritance_graph ();
1050
1051 /* Collect entry points to the unit. */
1052 if (symtab->dump_file)
1053 {
1054 fprintf (symtab->dump_file, "\n\nInitial ");
1055 symtab_node::dump_table (symtab->dump_file);
1056 }
1057
1058 if (symtab->dump_file)
1059 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1060
1061 for (node = symtab->first_symbol ();
1062 node != first_handled
1063 && node != first_handled_var; node = next)
1064 {
1065 next = node->next;
1066 if (!node->aux && !node->referred_to_p ())
1067 {
1068 if (symtab->dump_file)
1069 fprintf (symtab->dump_file, " %s", node->name ());
1070 node->remove ();
1071 continue;
1072 }
1073 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1074 {
1075 tree decl = node->decl;
1076
1077 if (cnode->definition && !gimple_has_body_p (decl)
1078 && !cnode->alias
1079 && !cnode->thunk.thunk_p)
1080 cnode->reset ();
1081
1082 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1083 || cnode->alias
1084 || gimple_has_body_p (decl));
1085 gcc_assert (cnode->analyzed == cnode->definition);
1086 }
1087 node->aux = NULL;
1088 }
1089 for (;node; node = node->next)
1090 node->aux = NULL;
1091 first_analyzed = symtab->first_function ();
1092 first_analyzed_var = symtab->first_variable ();
1093 if (symtab->dump_file)
1094 {
1095 fprintf (symtab->dump_file, "\n\nReclaimed ");
1096 symtab_node::dump_table (symtab->dump_file);
1097 }
1098 bitmap_obstack_release (NULL);
1099 ggc_collect ();
1100 /* Initialize assembler name hash, in particular we want to trigger C++
1101 mangling and same body alias creation before we free DECL_ARGUMENTS
1102 used by it. */
1103 if (!seen_error ())
1104 symtab->symtab_initialize_asm_name_hash ();
1105
1106 input_location = saved_loc;
1107 }
1108
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Every path through the loop body removes entry I from ALIAS_PAIRS
     via unordered_remove, so I is never incremented; iteration stops
     once the vector is empty.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol not present in this unit:
	     diagnose and clear the alias flag so later passes do not
	     treat the decl as an alias.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      /* Aliases to external symbols are invalid, with the exception of
	 C++ virtual thunks (see comment below) and weakrefs.  */
      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* Function-to-function alias.  Drop any stale definition of the
	     alias decl before creating the callgraph alias.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  /* Variable-to-variable alias.  */
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable aliases are not supported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1187
1188
/* Figure out what functions we want to assemble, i.e. set their PROCESS
   flag.  Comdat group members are marked together; with ENABLE_CHECKING
   we additionally verify that unneeded bodies were reclaimed.  */

static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* Nothing should be marked for output yet.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* PROCESS can only be set ahead of us by the same-comdat-group
	 propagation below.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      /* Mark the rest of the comdat group for output as well,
		 except for thunks, aliases and comdat-local members.  */
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Remember to verify this group below once all marking is done.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Unmarked comdat group members must not have a reclaimable body left.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1289
1290 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1291 in lowered gimple form. IN_SSA is true if the gimple is in SSA.
1292
1293 Set current_function_decl and cfun to newly constructed empty function body.
1294 return basic block in the function body. */
1295
1296 basic_block
1297 init_lowered_empty_function (tree decl, bool in_ssa)
1298 {
1299 basic_block bb;
1300
1301 current_function_decl = decl;
1302 allocate_struct_function (decl, false);
1303 gimple_register_cfg_hooks ();
1304 init_empty_tree_cfg ();
1305
1306 if (in_ssa)
1307 {
1308 init_tree_ssa (cfun);
1309 init_ssa_operands (cfun);
1310 cfun->gimple_df->in_ssa_p = true;
1311 cfun->curr_properties |= PROP_ssa;
1312 }
1313
1314 DECL_INITIAL (decl) = make_node (BLOCK);
1315
1316 DECL_SAVED_TREE (decl) = error_mark_node;
1317 cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1318 | PROP_cfg | PROP_loops);
1319
1320 set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1321 init_loops_structure (cfun, loops_for_fn (cfun), 1);
1322 loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1323
1324 /* Create BB for body of the function and connect it properly. */
1325 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1326 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1327 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1328 add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1329
1330 return bb;
1331 }
1332
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  The generated statements are
   inserted after BSI; the adjusted value is returned in a fresh
   temporary register.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied before the
     virtual one; for a result-adjusting thunk it is applied after (see
     the !this_adjusting case below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily create the type used to access vtable entries.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1436
/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNK is true, also produce assembler for
   thunks that are not lowered.
   Returns true if the thunk was expanded (either to assembly or to
   gimple), false if it was merely marked analyzed.  */

bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = thunk.virtual_value;
  tree virtual_offset = NULL;
  /* The thunk has exactly one callee: the function being thunked.  */
  tree alias = callees->callee->decl;
  tree thunk_fndecl = decl;
  tree a;


  /* Fast path: let the target emit the thunk directly as assembly when
     it can (this-adjusting thunks only).  */
  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      /* Not allowed to emit assembly yet; just record that the thunk was
	 analyzed so it can be emitted later.  */
      if (!output_asm_thunks)
	{
	  analyzed = true;
	  return false;
	}

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      thunk.thunk_p = false;
      analyzed = false;
    }
  else
    {
      /* Generic path: build a gimple body that adjusts the this pointer
	 (and/or the result) and tail-calls the real function.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gimple call;
      gimple ret;

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	  DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* All four block variables start out as the single body block; the
	 NULL-check case below splits them apart.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (!restmp)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
				 resdecl,
				 build_int_cst (TREE_TYPE
						(DECL_RESULT (alias)), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;

	      if (TREE_CODE (restmp) == VAR_DECL)
		add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
	nargs++;
      auto_vec<tree> vargs (nargs);
      /* The first argument is the adjusted this pointer for this-adjusting
	 thunks, the unmodified first parameter otherwise.  */
      if (this_adjusting)
	vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
	vargs.quick_push (a);

      /* Pass the remaining parameters through, copying non-gimple values
	 into temporaries first.  */
      if (nargs)
	for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    if (!is_gimple_val (arg))
	      {
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      gimple_call_set_with_bounds (call, instrumentation_clone);
      if (restmp)
	{
	  gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  /* A result-adjusting thunk must fix up the returned value before
	     returning it; otherwise the call can simply be a tail call.  */
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  gimple stmt;
		  /* The else branch returns NULL unchanged.  */
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* The callee never returns; there is no fallthrough edge.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      thunk.thunk_p = false;
      lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1695
1696 /* Assemble thunks and aliases associated to node. */
1697
1698 void
1699 cgraph_node::assemble_thunks_and_aliases (void)
1700 {
1701 cgraph_edge *e;
1702 ipa_ref *ref;
1703
1704 for (e = callers; e;)
1705 if (e->caller->thunk.thunk_p
1706 && !e->caller->thunk.add_pointer_bounds_args)
1707 {
1708 cgraph_node *thunk = e->caller;
1709
1710 e = e->next_caller;
1711 thunk->expand_thunk (true, false);
1712 thunk->assemble_thunks_and_aliases ();
1713 }
1714 else
1715 e = e->next_caller;
1716
1717 FOR_EACH_ALIAS (this, ref)
1718 {
1719 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1720 bool saved_written = TREE_ASM_WRITTEN (decl);
1721
1722 /* Force assemble_alias to really output the alias this time instead
1723 of buffering it in same alias pairs. */
1724 TREE_ASM_WRITTEN (decl) = 1;
1725 do_assemble_alias (alias->decl,
1726 DECL_ASSEMBLER_NAME (decl));
1727 alias->assemble_thunks_and_aliases ();
1728 TREE_ASM_WRITTEN (decl) = saved_written;
1729 }
1730 }
1731
/* Expand function specified by this node: run the IPA transforms and all
   normal passes over its body, emit assembly, then release the body and
   the outgoing call edges.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply queued IPA transforms (e.g. results of IPA passes) first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* If the size does not fit in unsigned int, print it as a
	     HOST_WIDE_INT lower bound instead.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body
     to get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
1843
1844 /* Node comparer that is responsible for the order that corresponds
1845 to time when a function was launched for the first time. */
1846
1847 static int
1848 node_cmp (const void *pa, const void *pb)
1849 {
1850 const cgraph_node *a = *(const cgraph_node * const *) pa;
1851 const cgraph_node *b = *(const cgraph_node * const *) pb;
1852
1853 /* Functions with time profile must be before these without profile. */
1854 if (!a->tp_first_run || !b->tp_first_run)
1855 return a->tp_first_run - b->tp_first_run;
1856
1857 return a->tp_first_run != b->tp_first_run
1858 ? b->tp_first_run - a->tp_first_run
1859 : b->order - a->order;
1860 }
1861
1862 /* Expand all functions that must be output.
1863
1864 Attempt to topologically sort the nodes so function is output when
1865 all called functions are already assembled to allow data to be
1866 propagated across the callgraph. Use a stack to get smaller distance
1867 between a function and its callees (later we may choose to use a more
1868 sophisticated algorithm for function reordering; we will likely want
1869 to use subsections to make the output functions appear in top-down
1870 order). */
1871
1872 static void
1873 expand_all_functions (void)
1874 {
1875 cgraph_node *node;
1876 cgraph_node **order = XCNEWVEC (cgraph_node *,
1877 symtab->cgraph_count);
1878 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1879 int order_pos, new_order_pos = 0;
1880 int i;
1881
1882 order_pos = ipa_reverse_postorder (order);
1883 gcc_assert (order_pos == symtab->cgraph_count);
1884
1885 /* Garbage collector may remove inline clones we eliminate during
1886 optimization. So we must be sure to not reference them. */
1887 for (i = 0; i < order_pos; i++)
1888 if (order[i]->process)
1889 order[new_order_pos++] = order[i];
1890
1891 if (flag_profile_reorder_functions)
1892 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
1893
1894 for (i = new_order_pos - 1; i >= 0; i--)
1895 {
1896 node = order[i];
1897
1898 if (node->process)
1899 {
1900 expanded_func_count++;
1901 if(node->tp_first_run)
1902 profiled_func_count++;
1903
1904 if (symtab->dump_file)
1905 fprintf (symtab->dump_file,
1906 "Time profile order in expand_all_functions:%s:%d\n",
1907 node->asm_name (), node->tp_first_run);
1908 node->process = 0;
1909 node->expand ();
1910 }
1911 }
1912
1913 if (dump_file)
1914 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1915 main_input_filename, profiled_func_count, expanded_func_count);
1916
1917 if (symtab->dump_file && flag_profile_reorder_functions)
1918 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
1919 profiled_func_count, expanded_func_count);
1920
1921 symtab->process_new_functions ();
1922 free_gimplify_stack ();
1923
1924 free (order);
1925 }
1926
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not used by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph_node.  */
  ORDER_VAR,		/* Slot holds a varpool_node.  */
  ORDER_ASM		/* Slot holds an asm_node.  */
};
1936
/* One slot of the ordered output array built by output_in_order: the kind
   of symbol occupying the slot plus a pointer to its node, with the union
   member discriminated by KIND.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1947
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.
   When NO_REORDER is true only do this for symbols marked no reorder.  */

static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* Order numbers are unique per symbol, so one slot per possible order
     number suffices; unused slots stay ORDER_UNDEFINED.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  /* Collect the functions to be output, skipping thunks and aliases.  */
  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Collect the non-external variables.  */
  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	  continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  /* Collect the toplevel asm statements.  */
  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2034
/* Run the interprocedural (IPA) pass pipeline: small IPA passes, summary
   generation, optional LTO summary streaming, and the regular IPA passes.
   Behavior differs depending on whether we are inside an LTO read-back
   (in_lto_p) and whether LTO output is requested.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (true, symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  /* Stream out IPA summaries for offload targets and/or LTO, each into
     its own section namespace.  */
  if (!in_lto_p)
    {
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  ipa_write_summaries (true);
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  ipa_write_summaries (false);
	}
    }

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes now unless this is an LTO compile that
     defers them to link time (and is not -ffat-lto-objects).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2104
2105
/* Return, as an identifier, the symbol that DECL's "alias" attribute
   refers to.  */
2107
2108 static tree
2109 get_alias_symbol (tree decl)
2110 {
2111 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2112 return get_identifier (TREE_STRING_POINTER
2113 (TREE_VALUE (TREE_VALUE (alias))));
2114 }
2115
2116
2117 /* Weakrefs may be associated to external decls and thus not output
2118 at expansion time. Emit all necessary aliases. */
2119
2120 void
2121 symbol_table::output_weakrefs (void)
2122 {
2123 symtab_node *node;
2124 cgraph_node *cnode;
2125 FOR_EACH_SYMBOL (node)
2126 if (node->alias
2127 && !TREE_ASM_WRITTEN (node->decl)
2128 && (!(cnode = dyn_cast <cgraph_node *> (node))
2129 || !cnode->instrumented_version
2130 || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2131 && node->weakref)
2132 {
2133 tree target;
2134
2135 /* Weakrefs are special by not requiring target definition in current
2136 compilation unit. It is thus bit hard to work out what we want to
2137 alias.
2138 When alias target is defined, we need to fetch it from symtab reference,
2139 otherwise it is pointed to by alias_target. */
2140 if (node->alias_target)
2141 target = (DECL_P (node->alias_target)
2142 ? DECL_ASSEMBLER_NAME (node->alias_target)
2143 : node->alias_target);
2144 else if (node->analyzed)
2145 target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2146 else
2147 {
2148 gcc_unreachable ();
2149 target = get_alias_symbol (node->decl);
2150 }
2151 do_assemble_alias (node->decl, target);
2152 }
2153 }
2154
2155 /* Perform simple optimizations based on callgraph. */
2156
2157 void
2158 symbol_table::compile (void)
2159 {
2160 if (seen_error ())
2161 return;
2162
2163 #ifdef ENABLE_CHECKING
2164 symtab_node::verify_symtab_nodes ();
2165 #endif
2166
2167 timevar_push (TV_CGRAPHOPT);
2168 if (pre_ipa_mem_report)
2169 {
2170 fprintf (stderr, "Memory consumption before IPA\n");
2171 dump_memory_report (false);
2172 }
2173 if (!quiet_flag)
2174 fprintf (stderr, "Performing interprocedural optimizations\n");
2175 state = IPA;
2176
2177 /* Offloading requires LTO infrastructure. */
2178 if (!in_lto_p && g->have_offload)
2179 flag_generate_lto = 1;
2180
2181 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
2182 if (flag_generate_lto)
2183 lto_streamer_hooks_init ();
2184
2185 /* Don't run the IPA passes if there was any error or sorry messages. */
2186 if (!seen_error ())
2187 ipa_passes ();
2188
2189 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2190 if (seen_error ()
2191 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2192 {
2193 timevar_pop (TV_CGRAPHOPT);
2194 return;
2195 }
2196
2197 /* This pass remove bodies of extern inline functions we never inlined.
2198 Do this later so other IPA passes see what is really going on.
2199 FIXME: This should be run just after inlining by pasmanager. */
2200 remove_unreachable_nodes (false, dump_file);
2201 global_info_ready = true;
2202 if (dump_file)
2203 {
2204 fprintf (dump_file, "Optimized ");
2205 symtab_node:: dump_table (dump_file);
2206 }
2207 if (post_ipa_mem_report)
2208 {
2209 fprintf (stderr, "Memory consumption after IPA\n");
2210 dump_memory_report (false);
2211 }
2212 timevar_pop (TV_CGRAPHOPT);
2213
2214 /* Output everything. */
2215 (*debug_hooks->assembly_start) ();
2216 if (!quiet_flag)
2217 fprintf (stderr, "Assembling functions:\n");
2218 #ifdef ENABLE_CHECKING
2219 symtab_node::verify_symtab_nodes ();
2220 #endif
2221
2222 materialize_all_clones ();
2223 bitmap_obstack_initialize (NULL);
2224 execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2225 bitmap_obstack_release (NULL);
2226 mark_functions_to_output ();
2227
2228 /* When weakref support is missing, we autmatically translate all
2229 references to NODE to references to its ultimate alias target.
2230 The renaming mechanizm uses flag IDENTIFIER_TRANSPARENT_ALIAS and
2231 TREE_CHAIN.
2232
2233 Set up this mapping before we output any assembler but once we are sure
2234 that all symbol renaming is done.
2235
2236 FIXME: All this uglyness can go away if we just do renaming at gimple
2237 level by physically rewritting the IL. At the moment we can only redirect
2238 calls, so we need infrastructure for renaming references as well. */
2239 #ifndef ASM_OUTPUT_WEAKREF
2240 symtab_node *node;
2241
2242 FOR_EACH_SYMBOL (node)
2243 if (node->alias
2244 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2245 {
2246 IDENTIFIER_TRANSPARENT_ALIAS
2247 (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2248 TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2249 = (node->alias_target ? node->alias_target
2250 : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2251 }
2252 #endif
2253
2254 state = EXPANSION;
2255
2256 if (!flag_toplevel_reorder)
2257 output_in_order (false);
2258 else
2259 {
2260 /* Output first asm statements and anything ordered. The process
2261 flag is cleared for these nodes, so we skip them later. */
2262 output_in_order (true);
2263 expand_all_functions ();
2264 output_variables ();
2265 }
2266
2267 process_new_functions ();
2268 state = FINISHED;
2269 output_weakrefs ();
2270
2271 if (dump_file)
2272 {
2273 fprintf (dump_file, "\nFinal ");
2274 symtab_node::dump_table (dump_file);
2275 }
2276 #ifdef ENABLE_CHECKING
2277 symtab_node::verify_symtab_nodes ();
2278 /* Double check that all inline clones are gone and that all
2279 function bodies have been released from memory. */
2280 if (!seen_error ())
2281 {
2282 cgraph_node *node;
2283 bool error_found = false;
2284
2285 FOR_EACH_DEFINED_FUNCTION (node)
2286 if (node->global.inlined_to
2287 || gimple_has_body_p (node->decl))
2288 {
2289 error_found = true;
2290 node->debug ();
2291 }
2292 if (error_found)
2293 internal_error ("nodes with unreleased memory found");
2294 }
2295 #endif
2296 }
2297
2298
2299 /* Analyze the whole compilation unit once it is parsed completely. */
2300
2301 void
2302 symbol_table::finalize_compilation_unit (void)
2303 {
2304 timevar_push (TV_CGRAPH);
2305
2306 /* If we're here there's no current function anymore. Some frontends
2307 are lazy in clearing these. */
2308 current_function_decl = NULL;
2309 set_cfun (NULL);
2310
2311 /* Do not skip analyzing the functions if there were errors, we
2312 miss diagnostics for following functions otherwise. */
2313
2314 /* Emit size functions we didn't inline. */
2315 finalize_size_functions ();
2316
2317 /* Mark alias targets necessary and emit diagnostics. */
2318 handle_alias_pairs ();
2319
2320 if (!quiet_flag)
2321 {
2322 fprintf (stderr, "\nAnalyzing compilation unit\n");
2323 fflush (stderr);
2324 }
2325
2326 if (flag_dump_passes)
2327 dump_passes ();
2328
2329 /* Gimplify and lower all functions, compute reachability and
2330 remove unreachable nodes. */
2331 analyze_functions ();
2332
2333 /* Mark alias targets necessary and emit diagnostics. */
2334 handle_alias_pairs ();
2335
2336 /* Gimplify and lower thunks. */
2337 analyze_functions ();
2338
2339 /* Finally drive the pass manager. */
2340 compile ();
2341
2342 timevar_pop (TV_CGRAPH);
2343 }
2344
2345 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2346 within the same process. For use by toplev::finalize. */
2347
2348 void
2349 cgraphunit_c_finalize (void)
2350 {
2351 gcc_assert (cgraph_new_nodes.length () == 0);
2352 cgraph_new_nodes.truncate (0);
2353
2354 vtable_entry_type = NULL;
2355 queued_nodes = &symtab_terminator;
2356
2357 first_analyzed = NULL;
2358 first_analyzed_var = NULL;
2359 }
2360
2361 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
2362 kind of wrapper method. */
2363
2364 void
2365 cgraph_node::create_wrapper (cgraph_node *target)
2366 {
2367 /* Preserve DECL_RESULT so we get right by reference flag. */
2368 tree decl_result = DECL_RESULT (decl);
2369
2370 /* Remove the function's body but keep arguments to be reused
2371 for thunk. */
2372 release_body (true);
2373 reset ();
2374
2375 DECL_RESULT (decl) = decl_result;
2376 DECL_INITIAL (decl) = NULL;
2377 allocate_struct_function (decl, false);
2378 set_cfun (NULL);
2379
2380 /* Turn alias into thunk and expand it into GIMPLE representation. */
2381 definition = true;
2382 thunk.thunk_p = true;
2383 thunk.this_adjusting = false;
2384
2385 cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
2386
2387 tree arguments = DECL_ARGUMENTS (decl);
2388
2389 while (arguments)
2390 {
2391 TREE_ADDRESSABLE (arguments) = false;
2392 arguments = TREE_CHAIN (arguments);
2393 }
2394
2395 expand_thunk (false, true);
2396 e->call_stmt_cannot_inline_p = true;
2397
2398 /* Inline summary set-up. */
2399 analyze ();
2400 inline_analyze_function (this);
2401 }
2402
2403 #include "gt-cgraphunit.h"