cgraphunit.c (analyze_functions): Use opt_for_fn.
[gcc.git] / gcc / cgraphunit.c
1 /* Driver of optimization process
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module implements main driver of compilation process.
22
23 The main scope of this file is to act as an interface in between
24 tree based frontends and the backend.
25
26 The front-end is supposed to use following functionality:
27
28 - finalize_function
29
30 This function is called once front-end has parsed whole body of function
31     and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
38 This function has same behavior as the above but is used for static
39 variables.
40
41 - add_asm_node
42
43 Insert new toplevel ASM statement
44
45 - finalize_compilation_unit
46
47 This function is called once (source level) compilation unit is finalized
48 and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
66         indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
78 	   The purpose of early optimizations is to optimize away simple
79 things that may otherwise confuse IP analysis. Very simple
80 propagation across the callgraph is done i.e. to discover
81 functions without side effects and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
86 	   time.  These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
93 passes by their ability to operate across whole program
94 at linktime. Their analysis stage is performed early to
95 both reduce linking times and linktime memory usage by
96 not having to represent whole program in memory.
97
98 	d) LTO streaming.  When doing LTO, everything important gets
99 streamed into the object file.
100
101 Compile time and or linktime analysis stage (WPA):
102
103 At linktime units gets streamed back and symbol table is
104 merged. Function bodies are not streamed in and not
105 available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
110 	   is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
115 separately. Now the function bodies becomes available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
121 	IP passes can produce copies of existing functions (such
122 as versioned clones or inline clones) without actually
123 manipulating their bodies by creating virtual clones in
124 the callgraph. At this time the virtual clones are
125 turned into real functions
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
129 decision of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
138 At this stage functions that needs to be output into
139 assembler are identified and compiled in topological order
140 6) Output of variables and aliases
141 Now it is known what variable references was not optimized
142 out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
151 	The functions are enqueued for later processing and inserted
152 into callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
156 produces a copy of function into new one (a version)
157 and apply simple transformations
158 */
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "varasm.h"
166 #include "stor-layout.h"
167 #include "stringpool.h"
168 #include "output.h"
169 #include "rtl.h"
170 #include "predict.h"
171 #include "vec.h"
172 #include "hashtab.h"
173 #include "hash-set.h"
174 #include "machmode.h"
175 #include "hard-reg-set.h"
176 #include "input.h"
177 #include "function.h"
178 #include "basic-block.h"
179 #include "tree-ssa-alias.h"
180 #include "internal-fn.h"
181 #include "gimple-fold.h"
182 #include "gimple-expr.h"
183 #include "is-a.h"
184 #include "gimple.h"
185 #include "gimplify.h"
186 #include "gimple-iterator.h"
187 #include "gimplify-me.h"
188 #include "gimple-ssa.h"
189 #include "tree-cfg.h"
190 #include "tree-into-ssa.h"
191 #include "tree-ssa.h"
192 #include "tree-inline.h"
193 #include "langhooks.h"
194 #include "toplev.h"
195 #include "flags.h"
196 #include "debug.h"
197 #include "target.h"
198 #include "diagnostic.h"
199 #include "params.h"
200 #include "intl.h"
201 #include "hash-map.h"
202 #include "plugin-api.h"
203 #include "ipa-ref.h"
204 #include "cgraph.h"
205 #include "alloc-pool.h"
206 #include "ipa-prop.h"
207 #include "tree-iterator.h"
208 #include "tree-pass.h"
209 #include "tree-dump.h"
210 #include "gimple-pretty-print.h"
211 #include "output.h"
212 #include "coverage.h"
213 #include "plugin.h"
214 #include "ipa-inline.h"
215 #include "ipa-utils.h"
216 #include "lto-streamer.h"
217 #include "except.h"
218 #include "cfgloop.h"
219 #include "regset.h" /* FIXME: For reg_obstack. */
220 #include "context.h"
221 #include "pass_manager.h"
222 #include "tree-nested.h"
223 #include "gimplify.h"
224 #include "dbgcnt.h"
225 #include "tree-chkp.h"
226 #include "lto-section-names.h"
227 #include "omp-low.h"
228
/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
vec<cgraph_node *> cgraph_new_nodes;

/* Forward declarations of local subroutines defined later in this file.  */
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void handle_alias_pairs (void);

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
240
241 /* Determine if symbol declaration is needed. That is, visible to something
242 either outside this translation unit, something magic in the system
243 configury */
244 bool
245 symtab_node::needed_p (void)
246 {
247 /* Double check that no one output the function into assembly file
248 early. */
249 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
250 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
251
252 if (!definition)
253 return false;
254
255 if (DECL_EXTERNAL (decl))
256 return false;
257
258 /* If the user told us it is used, then it must be so. */
259 if (force_output)
260 return true;
261
262 /* ABI forced symbols are needed when they are external. */
263 if (forced_by_abi && TREE_PUBLIC (decl))
264 return true;
265
266 /* Keep constructors, destructors and virtual functions. */
267 if (TREE_CODE (decl) == FUNCTION_DECL
268 && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
269 return true;
270
271 /* Externally visible variables must be output. The exception is
272 COMDAT variables that must be output only when they are needed. */
273 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
274 return true;
275
276 return false;
277 }
278
279 /* Head and terminator of the queue of nodes to be processed while building
280 callgraph. */
281
282 static symtab_node symtab_terminator;
283 static symtab_node *queued_nodes = &symtab_terminator;
284
285 /* Add NODE to queue starting at QUEUED_NODES.
286 The queue is linked via AUX pointers and terminated by pointer to 1. */
287
288 static void
289 enqueue_node (symtab_node *node)
290 {
291 if (node->aux)
292 return;
293 gcc_checking_assert (queued_nodes);
294 node->aux = queued_nodes;
295 queued_nodes = node;
296 }
297
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   How much work is done per function depends on how far compilation
   has progressed (STATE) when the function was created.  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  /* Nothing queued: nothing to do.  */
  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  Hence re-read length () each
     iteration rather than caching it.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA when past the early-local stage;
	     otherwise just recompute the inline summary if inlining
	     data has already been gathered.  */
	  if (state == IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summary_vec != NULL)
	    compute_inline_parameters (node, true);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* The whole queue has been consumed; release the storage.  */
  cgraph_new_nodes.release ();
}
365
366 /* As an GCC extension we allow redefinition of the function. The
367 semantics when both copies of bodies differ is not well defined.
368 We replace the old body with new body so in unit at a time mode
369 we always use new body, while in normal mode we may end up with
370 old body inlined into some functions and new body expanded and
371 inlined in others.
372
373 ??? It may make more sense to use one body for inlining and other
374 body for expanding the function but this is difficult to do. */
375
376 void
377 cgraph_node::reset (void)
378 {
379 /* If process is set, then we have already begun whole-unit analysis.
380 This is *not* testing for whether we've already emitted the function.
381 That case can be sort-of legitimately seen with real function redefinition
382 errors. I would argue that the front end should never present us with
383 such a case, but don't enforce that for now. */
384 gcc_assert (!process);
385
386 /* Reset our data structures so we can analyze the function again. */
387 memset (&local, 0, sizeof (local));
388 memset (&global, 0, sizeof (global));
389 memset (&rtl, 0, sizeof (rtl));
390 analyzed = false;
391 definition = false;
392 alias = false;
393 weakref = false;
394 cpp_implicit_alias = false;
395
396 remove_callees ();
397 remove_all_references ();
398 }
399
400 /* Return true when there are references to the node. */
401
402 bool
403 symtab_node::referred_to_p (void)
404 {
405 ipa_ref *ref = NULL;
406
407 /* See if there are any references at all. */
408 if (iterate_referring (0, ref))
409 return true;
410 /* For functions check also calls. */
411 cgraph_node *cn = dyn_cast <cgraph_node *> (this);
412 if (cn && cn->callers)
413 return true;
414 return false;
415 }
416
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* A redefinition: throw away the old body and start over.  */
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->definition = true;
  /* The front end may already have built a CFG (e.g. for nested
     functions); in that case the body is already lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!optimize
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is still being constructed, queue the node
     right away if it is already known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
477
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;
  /* How much work is needed depends on how far compilation has
     progressed when the function is inserted.  */
  switch (symtab->state)
    {
    case PARSING:
      /* Still parsing: the normal finalization path applies.  */
      cgraph_node::finalize_function (fndecl, false);
      break;
    case CONSTRUCTION:
      /* Just enqueue function to be processed at nearest occurrence.  */
      node = cgraph_node::get_create (fndecl);
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case IPA:
    case IPA_SSA:
    case EXPANSION:
      /* Bring the function into finalized state and enqueue for later
	 analyzing and compilation.  */
      node = cgraph_node::get_create (fndecl);
      node->local.local = false;
      node->definition = true;
      node->force_output = true;
      /* During expansion the lowering passes no longer run globally, so
	 lower the body here before queueing it.  */
      if (!lowered && symtab->state == EXPANSION)
	{
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (cfun, passes->all_lowering_passes);
	  passes->execute_early_local_passes ();
	  bitmap_obstack_release (NULL);
	  pop_cfun ();

	  lowered = true;
	}
      if (lowered)
	node->lowered = true;
      cgraph_new_nodes.safe_push (node);
      break;

    case FINISHED:
      /* At the very end of compilation we have to do all the work up
	 to expansion.  */
      node = cgraph_node::create (fndecl);
      if (lowered)
	node->lowered = true;
      node->definition = true;
      node->analyze ();
      push_cfun (DECL_STRUCT_FUNCTION (fndecl));
      gimple_register_cfg_hooks ();
      bitmap_obstack_initialize (NULL);
      if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	g->get_passes ()->execute_early_local_passes ();
      bitmap_obstack_release (NULL);
      pop_cfun ();
      node->expand ();
      break;

    default:
      gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
562
563 /* Analyze the function scheduled to be output. */
564 void
565 cgraph_node::analyze (void)
566 {
567 tree decl = this->decl;
568 location_t saved_loc = input_location;
569 input_location = DECL_SOURCE_LOCATION (decl);
570
571 if (thunk.thunk_p)
572 {
573 create_edge (cgraph_node::get (thunk.alias),
574 NULL, 0, CGRAPH_FREQ_BASE);
575 if (!expand_thunk (false, false))
576 {
577 thunk.alias = NULL;
578 return;
579 }
580 thunk.alias = NULL;
581 }
582 if (alias)
583 resolve_alias (cgraph_node::get (alias_target));
584 else if (dispatcher_function)
585 {
586 /* Generate the dispatcher body of multi-versioned functions. */
587 cgraph_function_version_info *dispatcher_version_info
588 = function_version ();
589 if (dispatcher_version_info != NULL
590 && (dispatcher_version_info->dispatcher_resolver
591 == NULL_TREE))
592 {
593 tree resolver = NULL_TREE;
594 gcc_assert (targetm.generate_version_dispatcher_body);
595 resolver = targetm.generate_version_dispatcher_body (this);
596 gcc_assert (resolver != NULL_TREE);
597 }
598 }
599 else
600 {
601 push_cfun (DECL_STRUCT_FUNCTION (decl));
602
603 assign_assembler_name_if_neeeded (decl);
604
605 /* Make sure to gimplify bodies only once. During analyzing a
606 function we lower it, which will require gimplified nested
607 functions, so we can end up here with an already gimplified
608 body. */
609 if (!gimple_has_body_p (decl))
610 gimplify_function_tree (decl);
611 dump_function (TDI_generic, decl);
612
613 /* Lower the function. */
614 if (!lowered)
615 {
616 if (nested)
617 lower_nested_functions (decl);
618 gcc_assert (!nested);
619
620 gimple_register_cfg_hooks ();
621 bitmap_obstack_initialize (NULL);
622 execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
623 free_dominance_info (CDI_POST_DOMINATORS);
624 free_dominance_info (CDI_DOMINATORS);
625 compact_blocks ();
626 bitmap_obstack_release (NULL);
627 lowered = true;
628 }
629
630 pop_cfun ();
631 }
632 analyzed = true;
633
634 input_location = saved_loc;
635 }
636
637 /* C++ frontend produce same body aliases all over the place, even before PCH
638 gets streamed out. It relies on us linking the aliases with their function
639 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
640 first produce aliases without links, but once C++ FE is sure he won't sream
641 PCH we build the links via this function. */
642
643 void
644 symbol_table::process_same_body_aliases (void)
645 {
646 symtab_node *node;
647 FOR_EACH_SYMBOL (node)
648 if (node->cpp_implicit_alias && !node->analyzed)
649 node->resolve_alias
650 (TREE_CODE (node->alias_target) == VAR_DECL
651 ? (symtab_node *)varpool_node::get_create (node->alias_target)
652 : (symtab_node *)cgraph_node::get_create (node->alias_target));
653 cpp_implicit_aliases_done = true;
654 }
655
656 /* Process attributes common for vars and functions. */
657
658 static void
659 process_common_attributes (symtab_node *node, tree decl)
660 {
661 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
662
663 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
664 {
665 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
666 "%<weakref%> attribute should be accompanied with"
667 " an %<alias%> attribute");
668 DECL_WEAK (decl) = 0;
669 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
670 DECL_ATTRIBUTES (decl));
671 }
672
673 if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
674 node->no_reorder = 1;
675 }
676
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  FIRST and FIRST_VAR delimit the symbols that
   were already handled by a previous invocation; only the newer ones
   are walked.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on an actual definition makes no sense; drop it.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external variable carrying an initializer is effectively a
	 definition; finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* Likewise, "weakref" on an initialized variable is ignored.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
771
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  notice_global_symbol (decl);
  node->definition = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
	  && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* While the symbol table is still being constructed, queue the
     variable if it is already needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Let the pointer-bounds checker (chkp) note the initializer.  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
811
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	  (edge, &final, &cache_token);

  /* hash_set::add returns true when the token was already present, so
     each distinct target list is walked only once.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		  == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (method_class_type
		     (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* With no reachable target at all, redirect the call to
	     __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
899
900
901 /* Discover all functions and variables that are trivially needed, analyze
902 them as well as all functions and variables referred by them */
903 static cgraph_node *first_analyzed;
904 static varpool_node *first_analyzed_var;
905
906 static void
907 analyze_functions (void)
908 {
909 /* Keep track of already processed nodes when called multiple times for
910 intermodule optimization. */
911 cgraph_node *first_handled = first_analyzed;
912 varpool_node *first_handled_var = first_analyzed_var;
913 hash_set<void *> reachable_call_targets;
914
915 symtab_node *node;
916 symtab_node *next;
917 int i;
918 ipa_ref *ref;
919 bool changed = true;
920 location_t saved_loc = input_location;
921
922 bitmap_obstack_initialize (NULL);
923 symtab->state = CONSTRUCTION;
924 input_location = UNKNOWN_LOCATION;
925
926 /* Ugly, but the fixup can not happen at a time same body alias is created;
927 C++ FE is confused about the COMDAT groups being right. */
928 if (symtab->cpp_implicit_aliases_done)
929 FOR_EACH_SYMBOL (node)
930 if (node->cpp_implicit_alias)
931 node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
932 if (optimize && flag_devirtualize)
933 build_type_inheritance_graph ();
934
935 /* Analysis adds static variables that in turn adds references to new functions.
936 So we need to iterate the process until it stabilize. */
937 while (changed)
938 {
939 changed = false;
940 process_function_and_variable_attributes (first_analyzed,
941 first_analyzed_var);
942
943 /* First identify the trivially needed symbols. */
944 for (node = symtab->first_symbol ();
945 node != first_analyzed
946 && node != first_analyzed_var; node = node->next)
947 {
948 /* Convert COMDAT group designators to IDENTIFIER_NODEs. */
949 node->get_comdat_group_id ();
950 if (node->needed_p ())
951 {
952 enqueue_node (node);
953 if (!changed && symtab->dump_file)
954 fprintf (symtab->dump_file, "Trivially needed symbols:");
955 changed = true;
956 if (symtab->dump_file)
957 fprintf (symtab->dump_file, " %s", node->asm_name ());
958 if (!changed && symtab->dump_file)
959 fprintf (symtab->dump_file, "\n");
960 }
961 if (node == first_analyzed
962 || node == first_analyzed_var)
963 break;
964 }
965 symtab->process_new_functions ();
966 first_analyzed_var = symtab->first_variable ();
967 first_analyzed = symtab->first_function ();
968
969 if (changed && symtab->dump_file)
970 fprintf (symtab->dump_file, "\n");
971
972 /* Lower representation, build callgraph edges and references for all trivially
973 needed symbols and all symbols referred by them. */
974 while (queued_nodes != &symtab_terminator)
975 {
976 changed = true;
977 node = queued_nodes;
978 queued_nodes = (symtab_node *)queued_nodes->aux;
979 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
980 if (cnode && cnode->definition)
981 {
982 cgraph_edge *edge;
983 tree decl = cnode->decl;
984
985 /* ??? It is possible to create extern inline function
986 and later using weak alias attribute to kill its body.
987 See gcc.c-torture/compile/20011119-1.c */
988 if (!DECL_STRUCT_FUNCTION (decl)
989 && !cnode->alias
990 && !cnode->thunk.thunk_p
991 && !cnode->dispatcher_function)
992 {
993 cnode->reset ();
994 cnode->local.redefined_extern_inline = true;
995 continue;
996 }
997
998 if (!cnode->analyzed)
999 cnode->analyze ();
1000
1001 for (edge = cnode->callees; edge; edge = edge->next_callee)
1002 if (edge->callee->definition)
1003 enqueue_node (edge->callee);
1004 if (optimize && opt_for_fn (cnode->decl, flag_devirtualize))
1005 {
1006 cgraph_edge *next;
1007
1008 for (edge = cnode->indirect_calls; edge; edge = next)
1009 {
1010 next = edge->next_callee;
1011 if (edge->indirect_info->polymorphic)
1012 walk_polymorphic_call_targets (&reachable_call_targets,
1013 edge);
1014 }
1015 }
1016
1017 /* If decl is a clone of an abstract function,
1018 mark that abstract function so that we don't release its body.
1019 The DECL_INITIAL() of that abstract function declaration
1020 will be later needed to output debug info. */
1021 if (DECL_ABSTRACT_ORIGIN (decl))
1022 {
1023 cgraph_node *origin_node
1024 = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1025 origin_node->used_as_abstract_origin = true;
1026 }
1027 }
1028 else
1029 {
1030 varpool_node *vnode = dyn_cast <varpool_node *> (node);
1031 if (vnode && vnode->definition && !vnode->analyzed)
1032 vnode->analyze ();
1033 }
1034
1035 if (node->same_comdat_group)
1036 {
1037 symtab_node *next;
1038 for (next = node->same_comdat_group;
1039 next != node;
1040 next = next->same_comdat_group)
1041 enqueue_node (next);
1042 }
1043 for (i = 0; node->iterate_reference (i, ref); i++)
1044 if (ref->referred->definition)
1045 enqueue_node (ref->referred);
1046 symtab->process_new_functions ();
1047 }
1048 }
1049 if (optimize && flag_devirtualize)
1050 update_type_inheritance_graph ();
1051
1052 /* Collect entry points to the unit. */
1053 if (symtab->dump_file)
1054 {
1055 fprintf (symtab->dump_file, "\n\nInitial ");
1056 symtab_node::dump_table (symtab->dump_file);
1057 }
1058
1059 if (symtab->dump_file)
1060 fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1061
1062 for (node = symtab->first_symbol ();
1063 node != first_handled
1064 && node != first_handled_var; node = next)
1065 {
1066 next = node->next;
1067 if (!node->aux && !node->referred_to_p ())
1068 {
1069 if (symtab->dump_file)
1070 fprintf (symtab->dump_file, " %s", node->name ());
1071 node->remove ();
1072 continue;
1073 }
1074 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1075 {
1076 tree decl = node->decl;
1077
1078 if (cnode->definition && !gimple_has_body_p (decl)
1079 && !cnode->alias
1080 && !cnode->thunk.thunk_p)
1081 cnode->reset ();
1082
1083 gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1084 || cnode->alias
1085 || gimple_has_body_p (decl));
1086 gcc_assert (cnode->analyzed == cnode->definition);
1087 }
1088 node->aux = NULL;
1089 }
1090 for (;node; node = node->next)
1091 node->aux = NULL;
1092 first_analyzed = symtab->first_function ();
1093 first_analyzed_var = symtab->first_variable ();
1094 if (symtab->dump_file)
1095 {
1096 fprintf (symtab->dump_file, "\n\nReclaimed ");
1097 symtab_node::dump_table (symtab->dump_file);
1098 }
1099 bitmap_obstack_release (NULL);
1100 ggc_collect ();
1101 /* Initialize assembler name hash, in particular we want to trigger C++
1102 mangling and same body alias creation before we free DECL_ARGUMENTS
1103 used by it. */
1104 if (!seen_error ())
1105 symtab->symtab_initialize_asm_name_hash ();
1106
1107 input_location = saved_loc;
1108 }
1109
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.

   Every (DECL, TARGET-NAME) pair in the global ALIAS_PAIRS vector is
   resolved against the symbol table and then removed; the vector itself
   is freed at the end.  Note that the loop index I is deliberately never
   incremented: every branch removes the current element with
   unordered_remove, which shifts a new element into slot I.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      /* Look up the alias target by its assembler name.  */
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol that does not exist anywhere:
	     diagnose and drop the alias flag so later passes do not
	     treat the decl as an alias.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* Function aliasing a function: drop any stale body the alias
	     decl may carry, then create a proper callgraph alias.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  /* Variable aliasing a variable.  */
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Cross-kind aliases (function <-> variable) are unsupported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1188
1189
/* Figure out what functions we want to assemble.

   Sets NODE->process on every analyzed function that still needs to be
   emitted (has a body, is not an alias/thunk, was not inlined everywhere
   and was not already written out), pulling whole comdat groups along.
   With ENABLE_CHECKING, verifies that all functions which should have
   been reclaimed by this point really were.  */

static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* Nothing should be marked for output before this pass runs.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* A node may already be marked only via comdat-group propagation
	 from an earlier iteration of this loop.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  if (node->same_comdat_group)
	    {
	      /* Emit the rest of the comdat group too, except thunks,
		 aliases and comdat-local members.  */
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Defer the consistency check for unmarked comdat members to
	     the second pass below, after all groups are processed.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1290
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.

   The returned block is a single empty basic block connected
   ENTRY -> BB -> EXIT, with loop structure initialized so passes that
   require PROP_loops can run on the result.  */

basic_block
init_lowered_empty_function (tree decl, bool in_ssa)
{
  basic_block bb;

  /* Make DECL the function being compiled; cfun is set up by
     allocate_struct_function.  */
  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  if (in_ssa)
    {
      /* Prepare SSA machinery and advertise the SSA property.  */
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark the body as already generated; the gimple CFG is the real body.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1333
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Gimple statements implementing the adjustment are inserted after the
   iterator BSI, which is advanced past them.  Returns a fresh SSA
   temporary holding the adjusted pointer.

   For a this-adjusting thunk the fixed offset is applied before the
   vtable lookup; for a result-adjusting thunk it is applied after
   (see the two fixed_offset blocks below).  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  if (this_adjusting
      && fixed_offset != 0)
    {
      /* ptr = ptr + fixed_offset (applied in place, before any
	 virtual adjustment).  */
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      if (!vtable_entry_type)
	{
	  /* Lazily build the type of a vtable slot: pointer to a
	     function returning int with unspecified arguments.  */
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
	  /* PTR may be a non-gimple-value expression at this point;
	     copy it into a register first.  */
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1437
/* Expand thunk NODE to gimple if possible.
   When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
   no assembler is produced.
   When OUTPUT_ASM_THUNK is true, also produce assembler for
   thunks that are not lowered.

   Two strategies are used:
     1) If the target can emit the thunk directly as assembly
	(can_output_mi_thunk) and FORCE_GIMPLE_THUNK is not set, write it
	out via output_mi_thunk (or return false without output when
	OUTPUT_ASM_THUNKS is false).
     2) Otherwise build a lowered gimple body that adjusts `this' (and
	possibly the return value) and tail-calls the thunked function.

   Returns true if a body (asm or gimple) was produced.  */

bool
cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
  bool this_adjusting = thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = thunk.virtual_value;
  tree virtual_offset = NULL;
  /* The thunk has exactly one callee: the function being thunked.  */
  tree alias = callees->callee->decl;
  tree thunk_fndecl = decl;
  tree a;


  if (!force_gimple_thunk && this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      /* Strategy 1: target emits the thunk as assembly directly.  */
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      if (!output_asm_thunks)
	{
	  /* Caller only wanted gimple; record the node as analyzed and
	     report that nothing was output.  */
	  analyzed = true;
	  return false;
	}

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The node is no longer a thunk; it now has an (assembled) body.  */
      thunk.thunk_p = false;
      analyzed = false;
    }
  else
    {
      /* Strategy 2: build a lowered gimple body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;

      gimple call;
      gimple ret;

      if (in_lto_p)
	get_untransformed_body ();
      a = DECL_ARGUMENTS (thunk_fndecl);

      current_function_decl = thunk_fndecl;

      /* Ensure thunks are emitted in their correct sections.  */
      resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (thunk.virtual_offset_p)
	virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	  DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* Start with a single empty block; THEN/ELSE/RETURN blocks are
	 only split off later for the NULL-pointer guard case.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (DECL_BY_REFERENCE (resdecl))
	    {
	      /* Return slot is passed by reference; store through it.  */
	      restmp = gimple_fold_indirect_ref (resdecl);
	      if (!restmp)
		restmp = build2 (MEM_REF,
				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
				 resdecl,
				 build_int_cst (TREE_TYPE
						(DECL_RESULT (alias)), 0));
	    }
	  else if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate return: reuse the result decl itself.  */
	      restmp = resdecl;

	      if (TREE_CODE (restmp) == VAR_DECL)
		add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      auto_vec<tree> vargs (nargs);
      /* The first argument is `this', adjusted when this_adjusting;
	 otherwise passed through unchanged.  */
      if (this_adjusting)
        vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else if (nargs)
        vargs.quick_push (a);

      if (nargs)
	for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	  {
	    tree tmp = arg;
	    if (!is_gimple_val (arg))
	      {
		/* Copy non-gimple-value arguments through a register.  */
		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
				      (TREE_TYPE (arg)), "arg");
		gimple stmt = gimple_build_assign (tmp, arg);
		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      }
	    vargs.quick_push (tmp);
	  }
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      callees->call_stmt = call;
      gimple_call_set_from_thunk (call, true);
      gimple_call_set_with_bounds (call, instrumentation_clone);
      if (restmp)
	{
	  gimple_call_set_lhs (call, restmp);
	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
						 TREE_TYPE (TREE_TYPE (alias))));
	}
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      if (!(gimple_call_flags (call) & ECF_NORETURN))
	{
	  if (restmp && !this_adjusting
	      && (fixed_offset || virtual_offset))
	    {
	      /* Result-adjusting (covariant return) thunk: the returned
		 pointer must itself be adjusted after the call.  */
	      tree true_label = NULL_TREE;

	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
		{
		  gimple stmt;
		  /* If the return type is a pointer, we need to
		     protect against NULL.  We know there will be an
		     adjustment, because that's why we're emitting a
		     thunk.  */
		  then_bb = create_basic_block (NULL, (void *) 0, bb);
		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
		  add_bb_to_loop (then_bb, bb->loop_father);
		  add_bb_to_loop (return_bb, bb->loop_father);
		  add_bb_to_loop (else_bb, bb->loop_father);
		  remove_edge (single_succ_edge (bb));
		  true_label = gimple_block_label (then_bb);
		  stmt = gimple_build_cond (NE_EXPR, restmp,
					    build_zero_cst (TREE_TYPE (restmp)),
					    NULL_TREE, NULL_TREE);
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  make_edge (bb, then_bb, EDGE_TRUE_VALUE);
		  make_edge (bb, else_bb, EDGE_FALSE_VALUE);
		  make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
		  make_edge (then_bb, return_bb, EDGE_FALLTHRU);
		  make_edge (else_bb, return_bb, EDGE_FALLTHRU);
		  bsi = gsi_last_bb (then_bb);
		}

	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				     fixed_offset, virtual_offset);
	      if (true_label)
		{
		  /* NULL branch: return a zero pointer unadjusted.  */
		  gimple stmt;
		  bsi = gsi_last_bb (else_bb);
		  stmt = gimple_build_assign (restmp,
					      build_zero_cst (TREE_TYPE (restmp)));
		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
		  bsi = gsi_last_bb (return_bb);
		}
	    }
	  else
	    gimple_call_set_tail (call, true);

	  /* Build return value.  */
	  if (!DECL_BY_REFERENCE (resdecl))
	    ret = gimple_build_return (restmp);
	  else
	    ret = gimple_build_return (resdecl);

	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
	}
      else
	{
	  /* The callee never returns; the call is a tail call and the
	     fallthrough edge to EXIT is dead.  */
	  gimple_call_set_tail (call, true);
	  remove_edge (single_succ_edge (bb));
	}

      cfun->gimple_df->in_ssa_p = true;
      /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = false;
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
      verify_flow_info ();
#endif
      free_dominance_info (CDI_DOMINATORS);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      thunk.thunk_p = false;
      lowered = true;
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
  return true;
}
1696
1697 /* Assemble thunks and aliases associated to node. */
1698
1699 void
1700 cgraph_node::assemble_thunks_and_aliases (void)
1701 {
1702 cgraph_edge *e;
1703 ipa_ref *ref;
1704
1705 for (e = callers; e;)
1706 if (e->caller->thunk.thunk_p
1707 && !e->caller->thunk.add_pointer_bounds_args)
1708 {
1709 cgraph_node *thunk = e->caller;
1710
1711 e = e->next_caller;
1712 thunk->expand_thunk (true, false);
1713 thunk->assemble_thunks_and_aliases ();
1714 }
1715 else
1716 e = e->next_caller;
1717
1718 FOR_EACH_ALIAS (this, ref)
1719 {
1720 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1721 bool saved_written = TREE_ASM_WRITTEN (decl);
1722
1723 /* Force assemble_alias to really output the alias this time instead
1724 of buffering it in same alias pairs. */
1725 TREE_ASM_WRITTEN (decl) = 1;
1726 do_assemble_alias (alias->decl,
1727 DECL_ASSEMBLER_NAME (decl));
1728 alias->assemble_thunks_and_aliases ();
1729 TREE_ASM_WRITTEN (decl) = saved_written;
1730 }
1731 }
1732
/* Expand function specified by node.

   Runs any pending IPA transforms and the full non-IPA pass pipeline on
   the function's body, producing assembly output, then releases the
   gimple body and all callgraph edges/references from this node.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply deferred per-function IPA transformations first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size when it fits in unsigned int, otherwise
	     just report that it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body to get more
     forward and lest backwarding jumps.  This however would need solving problem
     with comdats.  See PR48668.  Also aliases must come after function itself to
     make one pass assemblers, like one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in comdat
     groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
1844
1845 /* Node comparer that is responsible for the order that corresponds
1846 to time when a function was launched for the first time. */
1847
1848 static int
1849 node_cmp (const void *pa, const void *pb)
1850 {
1851 const cgraph_node *a = *(const cgraph_node * const *) pa;
1852 const cgraph_node *b = *(const cgraph_node * const *) pb;
1853
1854 /* Functions with time profile must be before these without profile. */
1855 if (!a->tp_first_run || !b->tp_first_run)
1856 return a->tp_first_run - b->tp_first_run;
1857
1858 return a->tp_first_run != b->tp_first_run
1859 ? b->tp_first_run - a->tp_first_run
1860 : b->order - a->order;
1861 }
1862
1863 /* Expand all functions that must be output.
1864
1865 Attempt to topologically sort the nodes so function is output when
1866 all called functions are already assembled to allow data to be
1867 propagated across the callgraph. Use a stack to get smaller distance
1868 between a function and its callees (later we may choose to use a more
1869 sophisticated algorithm for function reordering; we will likely want
1870 to use subsections to make the output functions appear in top-down
1871 order). */
1872
1873 static void
1874 expand_all_functions (void)
1875 {
1876 cgraph_node *node;
1877 cgraph_node **order = XCNEWVEC (cgraph_node *,
1878 symtab->cgraph_count);
1879 unsigned int expanded_func_count = 0, profiled_func_count = 0;
1880 int order_pos, new_order_pos = 0;
1881 int i;
1882
1883 order_pos = ipa_reverse_postorder (order);
1884 gcc_assert (order_pos == symtab->cgraph_count);
1885
1886 /* Garbage collector may remove inline clones we eliminate during
1887 optimization. So we must be sure to not reference them. */
1888 for (i = 0; i < order_pos; i++)
1889 if (order[i]->process)
1890 order[new_order_pos++] = order[i];
1891
1892 if (flag_profile_reorder_functions)
1893 qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
1894
1895 for (i = new_order_pos - 1; i >= 0; i--)
1896 {
1897 node = order[i];
1898
1899 if (node->process)
1900 {
1901 expanded_func_count++;
1902 if(node->tp_first_run)
1903 profiled_func_count++;
1904
1905 if (symtab->dump_file)
1906 fprintf (symtab->dump_file,
1907 "Time profile order in expand_all_functions:%s:%d\n",
1908 node->asm_name (), node->tp_first_run);
1909 node->process = 0;
1910 node->expand ();
1911 }
1912 }
1913
1914 if (dump_file)
1915 fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
1916 main_input_filename, profiled_func_count, expanded_func_count);
1917
1918 if (symtab->dump_file && flag_profile_reorder_functions)
1919 fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
1920 profiled_func_count, expanded_func_count);
1921
1922 symtab->process_new_functions ();
1923 free_gimplify_stack ();
1924
1925 free (order);
1926 }
1927
/* This is used to sort the node types by the cgraph order number.
   Discriminator for the cgraph_order_sort union below; ORDER_UNDEFINED
   (zero) marks slots with no symbol, so XCNEWVEC-zeroed arrays start
   out valid.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Empty slot.  */
  ORDER_FUNCTION,	/* u.f is a cgraph_node.  */
  ORDER_VAR,		/* u.v is a varpool_node.  */
  ORDER_ASM		/* u.a is an asm_node.  */
};
1937
/* One entry in output_in_order's order-indexed table: a tagged union of
   the three symbol kinds, discriminated by KIND.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;	/* Which union member is valid.  */
  union
  {
    cgraph_node *f;
    varpool_node *v;
    asm_node *a;
  } u;
};
1948
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.
   When NO_REORDER is true only do this for symbols marked no reorder.

   NOTE(review): asm nodes are always emitted regardless of NO_REORDER —
   the asm loop below has no no_reorder filter; confirm this is intended
   before relying on it.  */

static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;
  /* symtab->order is one past the highest order number handed out, so
     it bounds the order-indexed table.  XCNEWVEC zero-fills, leaving
     every slot ORDER_UNDEFINED.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	  continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit everything in source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();

  free (nodes);
}
2035
/* Run the interprocedural (IPA) optimization pipeline: small IPA
   passes, IPA summary generation, optional LTO summary streaming, and
   finally the regular IPA passes.  The exact statement order matters;
   several steps below depend on state set up by the previous ones.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes operate on the whole program, not a single function, so
     clear any per-function context left behind by the front end.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Small IPA passes were already run at compile time when reading an
     LTO stream, so only run them in a non-LTO compilation.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips the PLUGIN_ALL_IPA_PASSES_END
	 callback and bitmap_obstack_release; presumably acceptable because
	 compilation is aborting on error — confirm.  */
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal needs to iterate.  */
  symtab->remove_unreachable_nodes (true, symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      /* Compute per-function summaries used later by the regular IPA
	 passes (and streamed out for LTO).  */
      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  /* Stream out summaries: once for offload sections, once for LTO
     proper, each under its own section name prefix.  */
  if (!in_lto_p)
    {
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  ipa_write_summaries (true);
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  ipa_write_summaries (false);
	}
    }

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes now unless we are only streaming LTO
     bytecode (slim LTO at compile time, or an LTRANS stage handles
     them elsewhere).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2105
2106
2107 /* Return string alias is alias of. */
2108
2109 static tree
2110 get_alias_symbol (tree decl)
2111 {
2112 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2113 return get_identifier (TREE_STRING_POINTER
2114 (TREE_VALUE (TREE_VALUE (alias))));
2115 }
2116
2117
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  /* Emit every weakref alias whose decl was not already written out,
     skipping those whose instrumented counterpart was already emitted.  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* Neither alias_target nor an analyzed target: should not
	       happen.  NOTE(review): the assignment after gcc_unreachable
	       is dead in checking builds; presumably kept as a defensive
	       fallback / to silence uninitialized-use warnings — confirm.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
2155
/* Perform simple optimizations based on callgraph.  Drives the whole
   IPA and expansion phases: runs IPA passes, removes unreachable
   nodes, then expands every remaining function and variable into
   assembly output.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_lto = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.
     FIXME: This should be run just after inlining by the pass manager.  */
  remove_unreachable_nodes (false, dump_file);
  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  /* With -fno-toplevel-reorder emit everything in source order;
     otherwise emit only the no_reorder symbols in order and let the
     expansion machinery handle the rest.  */
  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered.  The process
	 flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2298
2299
/* Analyze the whole compilation unit once it is parsed completely.
   Called by the front end after the last toplevel declaration; lowers
   all finalized functions to GIMPLE, resolves aliases, and hands the
   symbol table to the pass manager via compile ().  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  Run again
     because analysis above may have discovered new alias pairs.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  A second analysis pass picks up the
     thunks created while handling aliases.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2345
2346 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2347 within the same process. For use by toplev::finalize. */
2348
2349 void
2350 cgraphunit_c_finalize (void)
2351 {
2352 gcc_assert (cgraph_new_nodes.length () == 0);
2353 cgraph_new_nodes.truncate (0);
2354
2355 vtable_entry_type = NULL;
2356 queued_nodes = &symtab_terminator;
2357
2358 first_analyzed = NULL;
2359 first_analyzed_var = NULL;
2360 }
2361
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  The node's existing body is discarded and
   replaced by a non-this-adjusting thunk that forwards to TARGET.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  /* Rebuild the minimal function context needed to expand the thunk.  */
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;
  thunk.thunk_p = true;
  thunk.this_adjusting = false;

  cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);

  /* Arguments are passed straight through to TARGET, so none of them
     needs to be addressable here.  */
  tree arguments = DECL_ARGUMENTS (decl);

  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);
  /* The generated call must stay a real call; never inline it.  */
  e->call_stmt_cannot_inline_p = true;

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2403
2404 #include "gt-cgraphunit.h"