1 /* Driver of optimization process
2 Copyright (C) 2003-2013 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
 21 /* This module implements the main driver of the compilation process.
 22
 23    The main scope of this file is to act as an interface between the
 24    tree-based front ends and the back end.
 25
 26    The front end is supposed to use the following functionality:
27
28 - cgraph_finalize_function
29
 30        This function is called once the front end has parsed the whole body of the function
 31        and it is certain that neither the function body nor the declaration will change.
32
33 (There is one exception needed for implementing GCC extern inline
34 function.)
35
36 - varpool_finalize_decl
37
 38        This function has the same behavior as the above but is used for static
 39        variables.
40
41 - add_asm_node
42
 43        Inserts a new toplevel ASM statement.
44
45 - finalize_compilation_unit
46
 47        This function is called once the (source level) compilation unit is finalized
 48        and it will no longer change.
49
50 The symbol table is constructed starting from the trivially needed
51 symbols finalized by the frontend. Functions are lowered into
52 GIMPLE representation and callgraph/reference lists are constructed.
53 Those are used to discover other necessary functions and variables.
54
55 At the end the bodies of unreachable functions are removed.
56
57 The function can be called multiple times when multiple source level
58 compilation units are combined.
59
60 - compile
61
62 This passes control to the back-end. Optimizations are performed and
63 final assembler is generated. This is done in the following way. Note
64 that with link time optimization the process is split into three
65 stages (compile time, linktime analysis and parallel linktime as
 66       indicated below).
67
68 Compile time:
69
70 1) Inter-procedural optimization.
71 (ipa_passes)
72
73 This part is further split into:
74
75 a) early optimizations. These are local passes executed in
76 the topological order on the callgraph.
77
 78 	 The purpose of early optimizations is to optimize away simple
 79 	 things that may otherwise confuse IP analysis. Very simple
 80 	 propagation across the callgraph is done, e.g. to discover
 81 	 functions without side effects, and simple inlining is performed.
82
83 b) early small interprocedural passes.
84
85 Those are interprocedural passes executed only at compilation
 86 	 time. These include, for example, transactional memory lowering,
87 unreachable code removal and other simple transformations.
88
89 c) IP analysis stage. All interprocedural passes do their
90 analysis.
91
92 Interprocedural passes differ from small interprocedural
 93 	 passes by their ability to operate across the whole program
 94 	 at linktime. Their analysis stage is performed early to
 95 	 both reduce linking times and linktime memory usage by
 96 	 not having to represent the whole program in memory.
97
 98       d) LTO streaming. When doing LTO, everything important gets
99 streamed into the object file.
100
 101     Compile time and/or linktime analysis stage (WPA):
 102
 103 	  At linktime units get streamed back and the symbol table is
 104 	  merged. Function bodies are not streamed in and are not
 105 	  available.
106 e) IP propagation stage. All IP passes execute their
107 IP propagation. This is done based on the earlier analysis
108 without having function bodies at hand.
109 f) Ltrans streaming. When doing WHOPR LTO, the program
 110 	 is partitioned and streamed into multiple object files.
111
112 Compile time and/or parallel linktime stage (ltrans)
113
114 Each of the object files is streamed back and compiled
 115 	  separately. Now the function bodies become available
116 again.
117
118 2) Virtual clone materialization
119 (cgraph_materialize_clone)
120
 121 	IP passes can produce copies of existing functions (such
 122 	as versioned clones or inline clones) without actually
 123 	manipulating their bodies by creating virtual clones in
 124 	the callgraph. At this time the virtual clones are
 125 	turned into real functions.
126 3) IP transformation
127
128 All IP passes transform function bodies based on earlier
 129 	decisions of the IP propagation.
130
131 4) late small IP passes
132
133 Simple IP passes working within single program partition.
134
135 5) Expansion
136 (expand_all_functions)
137
 138 	At this stage functions that need to be output into
 139 	assembler are identified and compiled in topological order.
 140     6) Output of variables and aliases
 141 	Now it is known which variable references were not optimized
 142 	out and thus all variables are output to the file.
143
144 Note that with -fno-toplevel-reorder passes 5 and 6
145 are combined together in cgraph_output_in_order.
146
147 Finally there are functions to manipulate the callgraph from
148 backend.
149 - cgraph_add_new_function is used to add backend produced
150 functions introduced after the unit is finalized.
 151       The functions are enqueued for later processing and inserted
 152       into the callgraph with cgraph_process_new_functions.
153
154 - cgraph_function_versioning
155
 156       produces a copy of a function into a new one (a version)
 157       and applies simple transformations.
158 */
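/* As a rough sketch (not code from any particular front end), the interface
   described above is typically driven like this:

     after parsing each function body:        cgraph_finalize_function (fndecl, false);
     after each file-scope variable:          varpool_finalize_decl (decl);
     after each toplevel asm statement:       add_asm_node (asm_string);
     once the translation unit is complete:   finalize_compilation_unit ();

   The exact call sites and arguments differ between front ends; FNDECL,
   DECL and ASM_STRING here are only illustrative names.  */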
159
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "output.h"
166 #include "rtl.h"
167 #include "tree-flow.h"
168 #include "tree-inline.h"
169 #include "langhooks.h"
170 #include "pointer-set.h"
171 #include "toplev.h"
172 #include "flags.h"
173 #include "ggc.h"
174 #include "debug.h"
175 #include "target.h"
176 #include "cgraph.h"
177 #include "diagnostic.h"
178 #include "params.h"
179 #include "fibheap.h"
180 #include "intl.h"
181 #include "function.h"
182 #include "ipa-prop.h"
183 #include "gimple.h"
184 #include "tree-iterator.h"
185 #include "tree-pass.h"
186 #include "tree-dump.h"
187 #include "gimple-pretty-print.h"
188 #include "output.h"
189 #include "coverage.h"
190 #include "plugin.h"
191 #include "ipa-inline.h"
192 #include "ipa-utils.h"
193 #include "lto-streamer.h"
194 #include "except.h"
195 #include "regset.h" /* FIXME: For reg_obstack. */
196
197 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
198 secondary queue used during optimization to accommodate passes that
199 may generate new functions that need to be optimized and expanded. */
200 cgraph_node_set cgraph_new_nodes;
201
202 static void expand_all_functions (void);
203 static void mark_functions_to_output (void);
204 static void expand_function (struct cgraph_node *);
205 static void cgraph_analyze_function (struct cgraph_node *);
206 static void handle_alias_pairs (void);
207
208 FILE *cgraph_dump_file;
209
210 /* Linked list of cgraph asm nodes. */
211 struct asm_node *asm_nodes;
212
 213 /* Last node in asm_nodes. */
214 static GTY(()) struct asm_node *asm_last_node;
215
216 /* Used for vtable lookup in thunk adjusting. */
217 static GTY (()) tree vtable_entry_type;
218
219 /* Determine if function DECL is trivially needed and should stay in the
220 compilation unit. This is used at the symbol table construction time
221 and differs from later logic removing unnecessary functions that can
222 take into account results of analysis, whole program info etc. */
223
224 static bool
225 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
226 {
227 /* If the user told us it is used, then it must be so. */
228 if (node->symbol.force_output)
229 return true;
230
 231   /* Double check that no one has output the function into the assembly file
 232      early. */
233 gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
234 || (node->thunk.thunk_p || node->same_body_alias)
235 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
236
237
238 /* Keep constructors, destructors and virtual functions. */
239 if (DECL_STATIC_CONSTRUCTOR (decl)
240 || DECL_STATIC_DESTRUCTOR (decl)
241 || (DECL_VIRTUAL_P (decl)
242 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
243 return true;
244
245 /* Externally visible functions must be output. The exception is
246 COMDAT functions that must be output only when they are needed. */
247
248 if (TREE_PUBLIC (decl)
249 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
250 return true;
251
252 return false;
253 }
254
 255 /* Head of the queue of nodes to be processed while building the callgraph. */
256
257 static symtab_node first = (symtab_node)(void *)1;
258
259 /* Add NODE to queue starting at FIRST.
260 The queue is linked via AUX pointers and terminated by pointer to 1. */
261
262 static void
263 enqueue_node (symtab_node node)
264 {
265 if (node->symbol.aux)
266 return;
267 gcc_checking_assert (first);
268 node->symbol.aux = first;
269 first = node;
270 }
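/* A minimal sketch of how the worklist built by enqueue_node is drained
   in cgraph_analyze_functions below:

     while (first != (symtab_node) (void *) 1)
       {
         symtab_node node = first;
         first = (symtab_node) first->symbol.aux;
         ... process NODE ...
       }

   i.e. it is a simple LIFO list threaded through the AUX pointers and
   terminated by the sentinel value 1.  */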
271
 272 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
 273    functions into the callgraph in a way so they look like ordinary reachable
 274    functions inserted into the callgraph already at construction time. */
275
276 bool
277 cgraph_process_new_functions (void)
278 {
279 bool output = false;
280 tree fndecl;
281 struct cgraph_node *node;
282 cgraph_node_set_iterator csi;
283
284 if (!cgraph_new_nodes)
285 return false;
286 handle_alias_pairs ();
 287   /* Note that this queue may grow as it is being processed, as the new
 288      functions may generate new ones. */
289 for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
290 {
291 node = csi_node (csi);
292 fndecl = node->symbol.decl;
293 switch (cgraph_state)
294 {
295 case CGRAPH_STATE_CONSTRUCTION:
 296 	  /* At construction time we just need to finalize the function and move
 297 	     it into the list of reachable functions. */
298
299 cgraph_finalize_function (fndecl, false);
300 output = true;
301 cgraph_call_function_insertion_hooks (node);
302 enqueue_node ((symtab_node) node);
303 break;
304
305 case CGRAPH_STATE_IPA:
306 case CGRAPH_STATE_IPA_SSA:
 307 	  /* When IPA optimization has already started, do all essential
 308 	     transformations that have already been performed on the whole
 309 	     cgraph but not on this function. */
310
311 gimple_register_cfg_hooks ();
312 if (!node->analyzed)
313 cgraph_analyze_function (node);
314 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
315 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
316 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
317 /* When not optimizing, be sure we run early local passes anyway
318 to expand OMP. */
319 || !optimize)
320 execute_pass_list (pass_early_local_passes.pass.sub);
321 else
322 compute_inline_parameters (node, true);
323 free_dominance_info (CDI_POST_DOMINATORS);
324 free_dominance_info (CDI_DOMINATORS);
325 pop_cfun ();
326 cgraph_call_function_insertion_hooks (node);
327 break;
328
329 case CGRAPH_STATE_EXPANSION:
330 /* Functions created during expansion shall be compiled
331 directly. */
332 node->process = 0;
333 cgraph_call_function_insertion_hooks (node);
334 expand_function (node);
335 break;
336
337 default:
338 gcc_unreachable ();
339 break;
340 }
341 }
342 free_cgraph_node_set (cgraph_new_nodes);
343 cgraph_new_nodes = NULL;
344 return output;
345 }
346
 347 /* As a GCC extension we allow redefinition of the function. The
 348    semantics when the two bodies differ are not well defined.
 349    We replace the old body with the new body so in unit-at-a-time mode
 350    we always use the new body, while in normal mode we may end up with
 351    the old body inlined into some functions and the new body expanded and
 352    inlined in others.
 353
 354    ??? It may make more sense to use one body for inlining and the other
 355    body for expanding the function but this is difficult to do. */
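/* For instance (an illustrative GNU C example, not code from this file):

     extern inline int foo (void) { return 1; }
     int foo (void) { return 2; }

   The second definition causes cgraph_finalize_function to call
   cgraph_reset_node below, dropping the first body so that the new one is
   used from then on.  */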
356
357 static void
358 cgraph_reset_node (struct cgraph_node *node)
359 {
360 /* If node->process is set, then we have already begun whole-unit analysis.
361 This is *not* testing for whether we've already emitted the function.
362 That case can be sort-of legitimately seen with real function redefinition
363 errors. I would argue that the front end should never present us with
364 such a case, but don't enforce that for now. */
365 gcc_assert (!node->process);
366
367 /* Reset our data structures so we can analyze the function again. */
368 memset (&node->local, 0, sizeof (node->local));
369 memset (&node->global, 0, sizeof (node->global));
370 memset (&node->rtl, 0, sizeof (node->rtl));
371 node->analyzed = false;
372 node->local.finalized = false;
373
374 cgraph_node_remove_callees (node);
375 }
376
377 /* Return true when there are references to NODE. */
378
379 static bool
380 referred_to_p (symtab_node node)
381 {
382 struct ipa_ref *ref;
383
384 /* See if there are any references at all. */
385 if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref))
386 return true;
387 /* For functions check also calls. */
388 cgraph_node *cn = dyn_cast <cgraph_node> (node);
389 if (cn && cn->callers)
390 return true;
391 return false;
392 }
393
394 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
395 logic in effect. If NESTED is true, then our caller cannot stand to have
396 the garbage collector run at the moment. We would need to either create
397 a new GC context, or just not compile right now. */
398
399 void
400 cgraph_finalize_function (tree decl, bool nested)
401 {
402 struct cgraph_node *node = cgraph_get_create_node (decl);
403
404 if (node->local.finalized)
405 {
406 cgraph_reset_node (node);
407 node->local.redefined_extern_inline = true;
408 }
409
410 notice_global_symbol (decl);
411 node->local.finalized = true;
412 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
413
414 /* With -fkeep-inline-functions we are keeping all inline functions except
415 for extern inline ones. */
416 if (flag_keep_inline_functions
417 && DECL_DECLARED_INLINE_P (decl)
418 && !DECL_EXTERNAL (decl)
419 && !DECL_DISREGARD_INLINE_LIMITS (decl))
420 node->symbol.force_output = 1;
421
422 /* When not optimizing, also output the static functions. (see
423 PR24561), but don't do so for always_inline functions, functions
424 declared inline and nested functions. These were optimized out
425 in the original implementation and it is unclear whether we want
426 to change the behavior here. */
427 if ((!optimize
428 && !node->same_body_alias
429 && !DECL_DISREGARD_INLINE_LIMITS (decl)
430 && !DECL_DECLARED_INLINE_P (decl)
431 && !(DECL_CONTEXT (decl)
432 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
433 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
434 node->symbol.force_output = 1;
435
436 /* If we've not yet emitted decl, tell the debug info about it. */
437 if (!TREE_ASM_WRITTEN (decl))
438 (*debug_hooks->deferred_inline_function) (decl);
439
440 /* Possibly warn about unused parameters. */
441 if (warn_unused_parameter)
442 do_warn_unused_parameter (decl);
443
444 if (!nested)
445 ggc_collect ();
446
447 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
448 && (cgraph_decide_is_function_needed (node, decl)
449 || referred_to_p ((symtab_node)node)))
450 enqueue_node ((symtab_node)node);
451 }
452
453 /* Add the function FNDECL to the call graph.
 454    Unlike cgraph_finalize_function, this function is intended to be used
 455    by the middle end and allows insertion of a new function at an arbitrary
 456    point of compilation. The function can be either in high, low or SSA form
 457    GIMPLE.
 458
 459    The function is assumed to be reachable and to have its address taken (so no
 460    API breaking optimizations are performed on it).
 461
 462    The main work done by this function is to enqueue the function for later
 463    processing to avoid the need for the passes to be re-entrant. */
464
465 void
466 cgraph_add_new_function (tree fndecl, bool lowered)
467 {
468 struct cgraph_node *node;
469 switch (cgraph_state)
470 {
471 case CGRAPH_STATE_PARSING:
472 cgraph_finalize_function (fndecl, false);
473 break;
474 case CGRAPH_STATE_CONSTRUCTION:
 475       /* Just enqueue the function to be processed at the nearest occurrence. */
476 node = cgraph_create_node (fndecl);
477 if (lowered)
478 node->lowered = true;
479 if (!cgraph_new_nodes)
480 cgraph_new_nodes = cgraph_node_set_new ();
481 cgraph_node_set_add (cgraph_new_nodes, node);
482 break;
483
484 case CGRAPH_STATE_IPA:
485 case CGRAPH_STATE_IPA_SSA:
486 case CGRAPH_STATE_EXPANSION:
487 /* Bring the function into finalized state and enqueue for later
 488 	 analysis and compilation. */
489 node = cgraph_get_create_node (fndecl);
490 node->local.local = false;
491 node->local.finalized = true;
492 node->symbol.force_output = true;
493 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
494 {
495 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
496 gimple_register_cfg_hooks ();
497 bitmap_obstack_initialize (NULL);
498 execute_pass_list (all_lowering_passes);
499 execute_pass_list (pass_early_local_passes.pass.sub);
500 bitmap_obstack_release (NULL);
501 pop_cfun ();
502
503 lowered = true;
504 }
505 if (lowered)
506 node->lowered = true;
507 if (!cgraph_new_nodes)
508 cgraph_new_nodes = cgraph_node_set_new ();
509 cgraph_node_set_add (cgraph_new_nodes, node);
510 break;
511
512 case CGRAPH_STATE_FINISHED:
513 /* At the very end of compilation we have to do all the work up
514 to expansion. */
515 node = cgraph_create_node (fndecl);
516 if (lowered)
517 node->lowered = true;
518 cgraph_analyze_function (node);
519 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
520 gimple_register_cfg_hooks ();
521 bitmap_obstack_initialize (NULL);
522 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
523 execute_pass_list (pass_early_local_passes.pass.sub);
524 bitmap_obstack_release (NULL);
525 pop_cfun ();
526 expand_function (node);
527 break;
528
529 default:
530 gcc_unreachable ();
531 }
532
533 /* Set a personality if required and we already passed EH lowering. */
534 if (lowered
535 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
536 == eh_personality_lang))
537 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
538 }
539
540 /* Add a top-level asm statement to the list. */
541
542 struct asm_node *
543 add_asm_node (tree asm_str)
544 {
545 struct asm_node *node;
546
547 node = ggc_alloc_cleared_asm_node ();
548 node->asm_str = asm_str;
549 node->order = symtab_order++;
550 node->next = NULL;
551 if (asm_nodes == NULL)
552 asm_nodes = node;
553 else
554 asm_last_node->next = node;
555 asm_last_node = node;
556 return node;
557 }
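/* For example (illustrative only), a file-scope statement such as

     asm (".symver foo_v1, foo@VERS_1");

   is handed to add_asm_node by the front end as a tree holding the string,
   lands on the asm_nodes list above, and is later emitted by
   output_asm_statements or output_in_order.  */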
558
559 /* Output all asm statements we have stored up to be output. */
560
561 static void
562 output_asm_statements (void)
563 {
564 struct asm_node *can;
565
566 if (seen_error ())
567 return;
568
569 for (can = asm_nodes; can; can = can->next)
570 assemble_asm (can->asm_str);
571 asm_nodes = NULL;
572 }
573
 574 /* The C++ FE sometimes changes linkage flags after producing same body aliases. */
575 void
576 fixup_same_cpp_alias_visibility (symtab_node node, symtab_node target, tree alias)
577 {
578 DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (alias);
579 if (TREE_PUBLIC (node->symbol.decl))
580 {
581 DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (alias);
582 DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (alias);
583 DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (alias);
584 if (DECL_ONE_ONLY (alias)
585 && !node->symbol.same_comdat_group)
586 symtab_add_to_same_comdat_group ((symtab_node)node, (symtab_node)target);
587 }
588 }
589
590 /* Analyze the function scheduled to be output. */
591 static void
592 cgraph_analyze_function (struct cgraph_node *node)
593 {
594 tree decl = node->symbol.decl;
595 location_t saved_loc = input_location;
596 input_location = DECL_SOURCE_LOCATION (decl);
597
598 if (node->alias && node->thunk.alias)
599 {
600 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
601 struct cgraph_node *n;
602
603 for (n = tgt; n && n->alias;
604 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
605 if (n == node)
606 {
607 error ("function %q+D part of alias cycle", node->symbol.decl);
608 node->alias = false;
609 input_location = saved_loc;
610 return;
611 }
612 if (!vec_safe_length (node->symbol.ref_list.references))
613 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
614 IPA_REF_ALIAS, NULL);
615 if (node->same_body_alias)
616 {
617 DECL_DECLARED_INLINE_P (node->symbol.decl)
618 = DECL_DECLARED_INLINE_P (node->thunk.alias);
619 DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
620 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
621 fixup_same_cpp_alias_visibility ((symtab_node) node, (symtab_node) tgt, node->thunk.alias);
622 }
623
624 if (node->symbol.address_taken)
625 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
626 }
627 else if (node->thunk.thunk_p)
628 {
629 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
630 NULL, 0, CGRAPH_FREQ_BASE);
631 }
632 else if (node->dispatcher_function)
633 {
634 /* Generate the dispatcher body of multi-versioned functions. */
635 struct cgraph_function_version_info *dispatcher_version_info
636 = get_cgraph_node_version (node);
637 if (dispatcher_version_info != NULL
638 && (dispatcher_version_info->dispatcher_resolver
639 == NULL_TREE))
640 {
641 tree resolver = NULL_TREE;
642 gcc_assert (targetm.generate_version_dispatcher_body);
643 resolver = targetm.generate_version_dispatcher_body (node);
644 gcc_assert (resolver != NULL_TREE);
645 }
646 }
647 else
648 {
649 push_cfun (DECL_STRUCT_FUNCTION (decl));
650
651 assign_assembler_name_if_neeeded (node->symbol.decl);
652
 653       /* Make sure to gimplify bodies only once. While analyzing a
654 function we lower it, which will require gimplified nested
655 functions, so we can end up here with an already gimplified
656 body. */
657 if (!gimple_has_body_p (decl))
658 gimplify_function_tree (decl);
659 dump_function (TDI_generic, decl);
660
661 /* Lower the function. */
662 if (!node->lowered)
663 {
664 if (node->nested)
665 lower_nested_functions (node->symbol.decl);
666 gcc_assert (!node->nested);
667
668 gimple_register_cfg_hooks ();
669 bitmap_obstack_initialize (NULL);
670 execute_pass_list (all_lowering_passes);
671 free_dominance_info (CDI_POST_DOMINATORS);
672 free_dominance_info (CDI_DOMINATORS);
673 compact_blocks ();
674 bitmap_obstack_release (NULL);
675 node->lowered = true;
676 }
677
678 pop_cfun ();
679 }
680 node->analyzed = true;
681
682 input_location = saved_loc;
683 }
684
 685 /* The C++ frontend produces same body aliases all over the place, even before
 686    PCH gets streamed out. It relies on us linking the aliases with their function
 687    in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
 688    first produce aliases without links, but once the C++ FE is sure it won't stream
 689    PCH we build the links via this function. */
690
691 void
692 cgraph_process_same_body_aliases (void)
693 {
694 struct cgraph_node *node;
695 FOR_EACH_FUNCTION (node)
696 if (node->same_body_alias
697 && !vec_safe_length (node->symbol.ref_list.references))
698 {
699 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
700 ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
701 IPA_REF_ALIAS, NULL);
702 }
703 same_body_aliases_done = true;
704 }
705
706 /* Process attributes common for vars and functions. */
707
708 static void
709 process_common_attributes (tree decl)
710 {
711 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
712
713 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
714 {
715 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
716 "%<weakref%> attribute should be accompanied with"
717 " an %<alias%> attribute");
718 DECL_WEAK (decl) = 0;
719 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
720 DECL_ATTRIBUTES (decl));
721 }
722 }
723
724 /* Look for externally_visible and used attributes and mark cgraph nodes
725 accordingly.
726
727 We cannot mark the nodes at the point the attributes are processed (in
728 handle_*_attribute) because the copy of the declarations available at that
729 point may not be canonical. For example, in:
730
731 void f();
732 void f() __attribute__((used));
733
734 the declaration we see in handle_used_attribute will be the second
735 declaration -- but the front end will subsequently merge that declaration
736 with the original declaration and discard the second declaration.
737
738 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
739
740 void f() {}
741 void f() __attribute__((externally_visible));
742
743 is valid.
744
745 So, we walk the nodes at the end of the translation unit, applying the
746 attributes at that point. */
747
748 static void
749 process_function_and_variable_attributes (struct cgraph_node *first,
750 struct varpool_node *first_var)
751 {
752 struct cgraph_node *node;
753 struct varpool_node *vnode;
754
755 for (node = cgraph_first_function (); node != first;
756 node = cgraph_next_function (node))
757 {
758 tree decl = node->symbol.decl;
759 if (DECL_PRESERVE_P (decl))
760 cgraph_mark_force_output_node (node);
761 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
762 {
763 if (! TREE_PUBLIC (node->symbol.decl))
764 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
765 "%<externally_visible%>"
766 " attribute have effect only on public objects");
767 }
768 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
769 && (node->local.finalized && !node->alias))
770 {
771 warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
772 "%<weakref%> attribute ignored"
773 " because function is defined");
774 DECL_WEAK (decl) = 0;
775 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
776 DECL_ATTRIBUTES (decl));
777 }
778
779 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
780 && !DECL_DECLARED_INLINE_P (decl)
781 /* redefining extern inline function makes it DECL_UNINLINABLE. */
782 && !DECL_UNINLINABLE (decl))
783 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
784 "always_inline function might not be inlinable");
785
786 process_common_attributes (decl);
787 }
788 for (vnode = varpool_first_variable (); vnode != first_var;
789 vnode = varpool_next_variable (vnode))
790 {
791 tree decl = vnode->symbol.decl;
792 if (DECL_EXTERNAL (decl)
793 && DECL_INITIAL (decl)
794 && const_value_known_p (decl))
795 varpool_finalize_decl (decl);
796 if (DECL_PRESERVE_P (decl))
797 vnode->symbol.force_output = true;
798 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
799 {
800 if (! TREE_PUBLIC (vnode->symbol.decl))
801 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
802 "%<externally_visible%>"
803 " attribute have effect only on public objects");
804 }
805 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
806 && vnode->finalized
807 && DECL_INITIAL (decl))
808 {
809 warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
810 "%<weakref%> attribute ignored"
811 " because variable is initialized");
812 DECL_WEAK (decl) = 0;
813 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
814 DECL_ATTRIBUTES (decl));
815 }
816 process_common_attributes (decl);
817 }
818 }
819
 820 /* Mark DECL as finalized. By finalizing the declaration, the frontend instructs
 821    the middle end to output the variable to the asm file, if needed or externally
 822    visible. */
823
824 void
825 varpool_finalize_decl (tree decl)
826 {
827 struct varpool_node *node = varpool_node_for_decl (decl);
828
829 gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
830
831 if (node->finalized)
832 return;
833 notice_global_symbol (decl);
834 node->finalized = true;
835 if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
836 /* Traditionally we do not eliminate static variables when not
 837 	 optimizing and when not doing toplevel reorder. */
838 || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
839 && !DECL_ARTIFICIAL (node->symbol.decl)))
840 node->symbol.force_output = true;
841
842 if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
843 && (decide_is_variable_needed (node, decl)
844 || referred_to_p ((symtab_node)node)))
845 enqueue_node ((symtab_node)node);
846 if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
847 varpool_analyze_node (node);
848 /* Some frontends produce various interface variables after compilation
 849      has finished. */
850 if (cgraph_state == CGRAPH_STATE_FINISHED)
851 varpool_assemble_decl (node);
852 }
853
854
855 /* Determine if a symbol NODE is finalized and needed. */
856
857 inline static bool
858 symbol_finalized_and_needed (symtab_node node)
859 {
860 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
861 return cnode->local.finalized
862 && cgraph_decide_is_function_needed (cnode, cnode->symbol.decl);
863 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
864 return vnode->finalized
865 && !DECL_EXTERNAL (vnode->symbol.decl)
866 && decide_is_variable_needed (vnode, vnode->symbol.decl);
867 return false;
868 }
869
870 /* Determine if a symbol NODE is finalized. */
871
872 inline static bool
873 symbol_finalized (symtab_node node)
874 {
875 if (cgraph_node *cnode= dyn_cast <cgraph_node> (node))
876 return cnode->local.finalized;
877 if (varpool_node *vnode = dyn_cast <varpool_node> (node))
878 return vnode->finalized;
879 return false;
880 }
881
882
883 /* Discover all functions and variables that are trivially needed, analyze
 884    them as well as all functions and variables referred to by them. */
885
886 static void
887 cgraph_analyze_functions (void)
888 {
889 /* Keep track of already processed nodes when called multiple times for
890 intermodule optimization. */
891 static struct cgraph_node *first_analyzed;
892 struct cgraph_node *first_handled = first_analyzed;
893 static struct varpool_node *first_analyzed_var;
894 struct varpool_node *first_handled_var = first_analyzed_var;
895
896 symtab_node node, next;
897 int i;
898 struct ipa_ref *ref;
899 bool changed = true;
900
901 bitmap_obstack_initialize (NULL);
902 cgraph_state = CGRAPH_STATE_CONSTRUCTION;
903
 904   /* Analysis adds static variables that in turn add references to new functions.
 905      So we need to iterate the process until it stabilizes. */
906 while (changed)
907 {
908 changed = false;
909 process_function_and_variable_attributes (first_analyzed,
910 first_analyzed_var);
911
912 /* First identify the trivially needed symbols. */
913 for (node = symtab_nodes;
914 node != (symtab_node)first_analyzed
915 && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
916 {
917 if (symbol_finalized_and_needed (node))
918 {
919 enqueue_node (node);
920 if (!changed && cgraph_dump_file)
921 fprintf (cgraph_dump_file, "Trivially needed symbols:");
922 changed = true;
923 if (cgraph_dump_file)
924 fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
925 }
926 if (node == (symtab_node)first_analyzed
927 || node == (symtab_node)first_analyzed_var)
928 break;
929 }
930 cgraph_process_new_functions ();
931 first_analyzed_var = varpool_first_variable ();
932 first_analyzed = cgraph_first_function ();
933
 934       if (changed && cgraph_dump_file)
935 fprintf (cgraph_dump_file, "\n");
936
937 /* Lower representation, build callgraph edges and references for all trivially
 938 	 needed symbols and all symbols referred to by them. */
939 while (first != (symtab_node)(void *)1)
940 {
941 changed = true;
942 node = first;
943 first = (symtab_node)first->symbol.aux;
944 cgraph_node *cnode = dyn_cast <cgraph_node> (node);
945 if (cnode && cnode->local.finalized)
946 {
947 struct cgraph_edge *edge;
948 tree decl = cnode->symbol.decl;
949
 950 	      /* ??? It is possible to create an extern inline function
 951 	      and later use the weak alias attribute to kill its body.
 952 	      See gcc.c-torture/compile/20011119-1.c */
953 if (!DECL_STRUCT_FUNCTION (decl)
954 && (!cnode->alias || !cnode->thunk.alias)
955 && !cnode->thunk.thunk_p
956 && !cnode->dispatcher_function)
957 {
958 cgraph_reset_node (cnode);
959 cnode->local.redefined_extern_inline = true;
960 continue;
961 }
962
963 if (!cnode->analyzed)
964 cgraph_analyze_function (cnode);
965
966 for (edge = cnode->callees; edge; edge = edge->next_callee)
967 if (edge->callee->local.finalized)
968 enqueue_node ((symtab_node)edge->callee);
969
970 /* If decl is a clone of an abstract function,
971 mark that abstract function so that we don't release its body.
972 The DECL_INITIAL() of that abstract function declaration
973 will be later needed to output debug info. */
974 if (DECL_ABSTRACT_ORIGIN (decl))
975 {
976 struct cgraph_node *origin_node
977 = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
978 origin_node->abstract_and_needed = true;
979 }
980 }
981 else
982 {
983 varpool_node *vnode = dyn_cast <varpool_node> (node);
984 if (vnode && vnode->finalized)
985 varpool_analyze_node (vnode);
986 }
987
988 if (node->symbol.same_comdat_group)
989 {
990 symtab_node next;
991 for (next = node->symbol.same_comdat_group;
992 next != node;
993 next = next->symbol.same_comdat_group)
994 enqueue_node (next);
995 }
996 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
997 if (symbol_finalized (ref->referred))
998 enqueue_node (ref->referred);
999 cgraph_process_new_functions ();
1000 }
1001 }
1002
1003 /* Collect entry points to the unit. */
1004 if (cgraph_dump_file)
1005 {
1006 fprintf (cgraph_dump_file, "\n\nInitial ");
1007 dump_symtab (cgraph_dump_file);
1008 }
1009
1010 if (cgraph_dump_file)
1011 fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
1012
1013 for (node = symtab_nodes;
1014 node != (symtab_node)first_handled
1015 && node != (symtab_node)first_handled_var; node = next)
1016 {
1017 next = node->symbol.next;
1018 if (!node->symbol.aux && !referred_to_p (node))
1019 {
1020 if (cgraph_dump_file)
1021 fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
1022 symtab_remove_node (node);
1023 continue;
1024 }
1025 if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1026 {
1027 tree decl = node->symbol.decl;
1028
1029 if (cnode->local.finalized && !gimple_has_body_p (decl)
1030 && (!cnode->alias || !cnode->thunk.alias)
1031 && !cnode->thunk.thunk_p)
1032 cgraph_reset_node (cnode);
1033
1034 gcc_assert (!cnode->local.finalized || cnode->thunk.thunk_p
1035 || cnode->alias
1036 || gimple_has_body_p (decl));
1037 gcc_assert (cnode->analyzed == cnode->local.finalized);
1038 }
1039 node->symbol.aux = NULL;
1040 }
1041 first_analyzed = cgraph_first_function ();
1042 first_analyzed_var = varpool_first_variable ();
1043 if (cgraph_dump_file)
1044 {
1045 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1046 dump_symtab (cgraph_dump_file);
1047 }
1048 bitmap_obstack_release (NULL);
1049 ggc_collect ();
1050 }
1051
 1052 /* Translate the ugly representation of aliases as alias pairs into a nice
 1053    representation in the callgraph. We don't handle all cases yet,
 1054    unfortunately. */
1055
1056 static void
1057 handle_alias_pairs (void)
1058 {
1059 alias_pair *p;
1060 unsigned i;
1061
1062 for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1063 {
1064 symtab_node target_node = symtab_node_for_asm (p->target);
1065
 1066       /* Weakrefs with a target not defined in the current unit are easy to handle;
 1067 	 they behave just like external variables except that we need to note the
 1068 	 alias flag to later output the weakref pseudo op into the asm file. */
1069 if (!target_node && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1070 {
1071 if (TREE_CODE (p->decl) == FUNCTION_DECL)
1072 {
1073 struct cgraph_node *anode = cgraph_get_create_node (p->decl);
1074 anode->alias = true;
1075 anode->thunk.alias = p->target;
1076 }
1077 else
1078 {
1079 struct varpool_node *anode = varpool_get_node (p->decl);
1080 anode->alias = true;
1081 anode->alias_of = p->target;
1082 }
1083 DECL_EXTERNAL (p->decl) = 1;
1084 alias_pairs->unordered_remove (i);
1085 continue;
1086 }
1087 else if (!target_node)
1088 {
1089 error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1090 alias_pairs->unordered_remove (i);
1091 continue;
1092 }
1093
1094 /* Normally EXTERNAL flag is used to mark external inlines,
1095 however for aliases it seems to be allowed to use it w/o
1096 any meaning. See gcc.dg/attr-alias-3.c
1097 However for weakref we insist on EXTERNAL flag being set.
1098 See gcc.dg/attr-alias-5.c */
1099 if (DECL_EXTERNAL (p->decl))
1100 DECL_EXTERNAL (p->decl)
1101 = lookup_attribute ("weakref",
1102 DECL_ATTRIBUTES (p->decl)) != NULL;
1103
1104 if (DECL_EXTERNAL (target_node->symbol.decl)
1105 /* We use local aliases for C++ thunks to force the tailcall
1106 to bind locally. This is a hack - to keep it working do
1107 the following (which is not strictly correct). */
 1108 	  && (TREE_CODE (target_node->symbol.decl) != FUNCTION_DECL
1109 || ! DECL_VIRTUAL_P (target_node->symbol.decl))
1110 && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1111 {
1112 error ("%q+D aliased to external symbol %qE",
1113 p->decl, p->target);
1114 }
1115
1116 if (TREE_CODE (p->decl) == FUNCTION_DECL
1117 && target_node && is_a <cgraph_node> (target_node))
1118 {
1119 struct cgraph_node *src_node = cgraph_get_node (p->decl);
1120 if (src_node && src_node->local.finalized)
1121 cgraph_reset_node (src_node);
1122 cgraph_create_function_alias (p->decl, target_node->symbol.decl);
1123 alias_pairs->unordered_remove (i);
1124 }
1125 else if (TREE_CODE (p->decl) == VAR_DECL
1126 && target_node && is_a <varpool_node> (target_node))
1127 {
1128 varpool_create_variable_alias (p->decl, target_node->symbol.decl);
1129 alias_pairs->unordered_remove (i);
1130 }
1131 else
1132 {
1133 error ("%q+D alias in between function and variable is not supported",
1134 p->decl);
1135 warning (0, "%q+D aliased declaration",
1136 target_node->symbol.decl);
1137 alias_pairs->unordered_remove (i);
1138 }
1139 }
1140 vec_free (alias_pairs);
1141 }
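/* The alias pairs processed above typically originate from declarations
   such as (illustrative example, not from this file):

     void target (void) { }
     void alias_fn (void) __attribute__ ((alias ("target")));
     static void weak_fn (void) __attribute__ ((weakref ("extern_target")));

   The first alias becomes a regular cgraph function alias via
   cgraph_create_function_alias; the weakref whose target is not defined in
   this unit stays a DECL_EXTERNAL declaration with only the alias flag set,
   as described in the comments above.  */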
1142
1143
1144 /* Figure out what functions we want to assemble. */
1145
1146 static void
1147 mark_functions_to_output (void)
1148 {
1149 struct cgraph_node *node;
1150 #ifdef ENABLE_CHECKING
1151 bool check_same_comdat_groups = false;
1152
1153 FOR_EACH_FUNCTION (node)
1154 gcc_assert (!node->process);
1155 #endif
1156
1157 FOR_EACH_FUNCTION (node)
1158 {
1159 tree decl = node->symbol.decl;
1160
1161 gcc_assert (!node->process || node->symbol.same_comdat_group);
1162 if (node->process)
1163 continue;
1164
1165 /* We need to output all local functions that are used and not
1166 always inlined, as well as those that are reachable from
1167 outside the current compilation unit. */
1168 if (node->analyzed
1169 && !node->thunk.thunk_p
1170 && !node->alias
1171 && !node->global.inlined_to
1172 && !TREE_ASM_WRITTEN (decl)
1173 && !DECL_EXTERNAL (decl))
1174 {
1175 node->process = 1;
1176 if (node->symbol.same_comdat_group)
1177 {
1178 struct cgraph_node *next;
1179 for (next = cgraph (node->symbol.same_comdat_group);
1180 next != node;
1181 next = cgraph (next->symbol.same_comdat_group))
1182 if (!next->thunk.thunk_p && !next->alias)
1183 next->process = 1;
1184 }
1185 }
1186 else if (node->symbol.same_comdat_group)
1187 {
1188 #ifdef ENABLE_CHECKING
1189 check_same_comdat_groups = true;
1190 #endif
1191 }
1192 else
1193 {
1194 /* We should've reclaimed all functions that are not needed. */
1195 #ifdef ENABLE_CHECKING
1196 if (!node->global.inlined_to
1197 && gimple_has_body_p (decl)
 1198 	      /* FIXME: in an ltrans unit when the offline copy is outside a partition but inline copies
 1199 		 are inside a partition, we can end up not removing the body since we no longer
 1200 		 have an analyzed node pointing to it. */
1201 && !node->symbol.in_other_partition
1202 && !node->alias
1203 && !node->clones
1204 && !DECL_EXTERNAL (decl))
1205 {
1206 dump_cgraph_node (stderr, node);
1207 internal_error ("failed to reclaim unneeded function");
1208 }
1209 #endif
1210 gcc_assert (node->global.inlined_to
1211 || !gimple_has_body_p (decl)
1212 || node->symbol.in_other_partition
1213 || node->clones
1214 || DECL_ARTIFICIAL (decl)
1215 || DECL_EXTERNAL (decl));
1216
1217 }
1218
1219 }
1220 #ifdef ENABLE_CHECKING
1221 if (check_same_comdat_groups)
1222 FOR_EACH_FUNCTION (node)
1223 if (node->symbol.same_comdat_group && !node->process)
1224 {
1225 tree decl = node->symbol.decl;
1226 if (!node->global.inlined_to
1227 && gimple_has_body_p (decl)
1228 /* FIXME: in an ltrans unit when the offline copy is outside a
1229 partition but inline copies are inside a partition, we can
1230 end up not removing the body since we no longer have an
1231 analyzed node pointing to it. */
1232 && !node->symbol.in_other_partition
1233 && !node->clones
1234 && !DECL_EXTERNAL (decl))
1235 {
1236 dump_cgraph_node (stderr, node);
1237 internal_error ("failed to reclaim unneeded function in same "
1238 "comdat group");
1239 }
1240 }
1241 #endif
1242 }
1243
 1244 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
 1245    in lowered GIMPLE form. IN_SSA is true if the GIMPLE is in SSA form.
 1246
 1247    Set current_function_decl and cfun to the newly constructed empty function body.
 1248    Return the basic block in the function body. */
1249
1250 basic_block
1251 init_lowered_empty_function (tree decl, bool in_ssa)
1252 {
1253 basic_block bb;
1254
1255 current_function_decl = decl;
1256 allocate_struct_function (decl, false);
1257 gimple_register_cfg_hooks ();
1258 init_empty_tree_cfg ();
1259
1260 if (in_ssa)
1261 {
1262 init_tree_ssa (cfun);
1263 init_ssa_operands (cfun);
1264 cfun->gimple_df->in_ssa_p = true;
1265 }
1266
1267 DECL_INITIAL (decl) = make_node (BLOCK);
1268
1269 DECL_SAVED_TREE (decl) = error_mark_node;
1270 cfun->curr_properties |=
1271 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_ssa | PROP_gimple_any);
1272
1273 /* Create BB for body of the function and connect it properly. */
1274 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1275 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1276 make_edge (bb, EXIT_BLOCK_PTR, 0);
1277
1278 return bb;
1279 }
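/* The function constructed above has the trivial CFG

     ENTRY_BLOCK -> bb -> EXIT_BLOCK

   with BB still empty; callers such as assemble_thunk fill BB with
   statements and may add further blocks.  */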
1280
1281 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1282 offset indicated by VIRTUAL_OFFSET, if that is
1283 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1284 zero for a result adjusting thunk. */
1285
1286 static tree
1287 thunk_adjust (gimple_stmt_iterator * bsi,
1288 tree ptr, bool this_adjusting,
1289 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1290 {
1291 gimple stmt;
1292 tree ret;
1293
1294 if (this_adjusting
1295 && fixed_offset != 0)
1296 {
1297 stmt = gimple_build_assign
1298 (ptr, fold_build_pointer_plus_hwi_loc (input_location,
1299 ptr,
1300 fixed_offset));
1301 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1302 }
1303
1304 /* If there's a virtual offset, look up that value in the vtable and
1305 adjust the pointer again. */
1306 if (virtual_offset)
1307 {
1308 tree vtabletmp;
1309 tree vtabletmp2;
1310 tree vtabletmp3;
1311
1312 if (!vtable_entry_type)
1313 {
1314 tree vfunc_type = make_node (FUNCTION_TYPE);
1315 TREE_TYPE (vfunc_type) = integer_type_node;
1316 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1317 layout_type (vfunc_type);
1318
1319 vtable_entry_type = build_pointer_type (vfunc_type);
1320 }
1321
1322 vtabletmp =
1323 create_tmp_reg (build_pointer_type
1324 (build_pointer_type (vtable_entry_type)), "vptr");
1325
1326 /* The vptr is always at offset zero in the object. */
1327 stmt = gimple_build_assign (vtabletmp,
1328 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1329 ptr));
1330 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1331
1332 /* Form the vtable address. */
1333 vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1334 "vtableaddr");
1335 stmt = gimple_build_assign (vtabletmp2,
1336 build_simple_mem_ref (vtabletmp));
1337 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1338
1339 /* Find the entry with the vcall offset. */
1340 stmt = gimple_build_assign (vtabletmp2,
1341 fold_build_pointer_plus_loc (input_location,
1342 vtabletmp2,
1343 virtual_offset));
1344 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1345
1346 /* Get the offset itself. */
1347 vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1348 "vcalloffset");
1349 stmt = gimple_build_assign (vtabletmp3,
1350 build_simple_mem_ref (vtabletmp2));
1351 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1352
1353 /* Adjust the `this' pointer. */
1354 ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1355 ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1356 GSI_CONTINUE_LINKING);
1357 }
1358
1359 if (!this_adjusting
1360 && fixed_offset != 0)
1361 /* Adjust the pointer by the constant. */
1362 {
1363 tree ptrtmp;
1364
1365 if (TREE_CODE (ptr) == VAR_DECL)
1366 ptrtmp = ptr;
1367 else
1368 {
1369 ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1370 stmt = gimple_build_assign (ptrtmp, ptr);
1371 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1372 }
1373 ptr = fold_build_pointer_plus_hwi_loc (input_location,
1374 ptrtmp, fixed_offset);
1375 }
1376
1377 /* Emit the statement and gimplify the adjustment expression. */
1378 ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1379 stmt = gimple_build_assign (ret, ptr);
1380 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1381
1382 return ret;
1383 }
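/* Informally, the value built by thunk_adjust is

     ptr + FIXED_OFFSET                     without a virtual offset, and
     ptr + FIXED_OFFSET + vcall_offset      with one, where vcall_offset is
                                            loaded from the vtable slot
                                            VIRTUAL_OFFSET bytes past the
                                            address stored in the object's
                                            vptr.

   For this-adjusting thunks FIXED_OFFSET is applied before the vtable
   lookup; for result-adjusting thunks it is applied after it.  */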
1384
1385 /* Produce assembler for thunk NODE. */
1386
1387 static void
1388 assemble_thunk (struct cgraph_node *node)
1389 {
1390 bool this_adjusting = node->thunk.this_adjusting;
1391 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1392 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1393 tree virtual_offset = NULL;
1394 tree alias = node->thunk.alias;
1395 tree thunk_fndecl = node->symbol.decl;
1396 tree a = DECL_ARGUMENTS (thunk_fndecl);
1397
1398 current_function_decl = thunk_fndecl;
1399
1400 /* Ensure thunks are emitted in their correct sections. */
1401 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1402
1403 if (this_adjusting
1404 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1405 virtual_value, alias))
1406 {
1407 const char *fnname;
1408 tree fn_block;
1409 tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1410
1411 DECL_RESULT (thunk_fndecl)
1412 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1413 RESULT_DECL, 0, restype);
1414 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1415
1416 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1417 create one. */
1418 fn_block = make_node (BLOCK);
1419 BLOCK_VARS (fn_block) = a;
1420 DECL_INITIAL (thunk_fndecl) = fn_block;
1421 init_function_start (thunk_fndecl);
1422 cfun->is_thunk = 1;
1423 insn_locations_init ();
1424 set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1425 prologue_location = curr_insn_location ();
1426 assemble_start_function (thunk_fndecl, fnname);
1427
1428 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1429 fixed_offset, virtual_value, alias);
1430
1431 assemble_end_function (thunk_fndecl, fnname);
1432 insn_locations_finalize ();
1433 init_insn_lengths ();
1434 free_after_compilation (cfun);
1435 set_cfun (NULL);
1436 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1437 node->thunk.thunk_p = false;
1438 node->analyzed = false;
1439 }
1440 else
1441 {
1442 tree restype;
1443 basic_block bb, then_bb, else_bb, return_bb;
1444 gimple_stmt_iterator bsi;
1445 int nargs = 0;
1446 tree arg;
1447 int i;
1448 tree resdecl;
1449 tree restmp = NULL;
1450 vec<tree> vargs;
1451
1452 gimple call;
1453 gimple ret;
1454
1455 DECL_IGNORED_P (thunk_fndecl) = 1;
1456 bitmap_obstack_initialize (NULL);
1457
1458 if (node->thunk.virtual_offset_p)
1459 virtual_offset = size_int (virtual_value);
1460
1461 /* Build the return declaration for the function. */
1462 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1463 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1464 {
1465 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1466 DECL_ARTIFICIAL (resdecl) = 1;
1467 DECL_IGNORED_P (resdecl) = 1;
1468 DECL_RESULT (thunk_fndecl) = resdecl;
1469 }
1470 else
1471 resdecl = DECL_RESULT (thunk_fndecl);
1472
1473 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);
1474
1475 bsi = gsi_start_bb (bb);
1476
1477 /* Build call to the function being thunked. */
1478 if (!VOID_TYPE_P (restype))
1479 {
1480 if (!is_gimple_reg_type (restype))
1481 {
1482 restmp = resdecl;
1483 add_local_decl (cfun, restmp);
1484 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1485 }
1486 else
1487 restmp = create_tmp_reg (restype, "retval");
1488 }
1489
1490 for (arg = a; arg; arg = DECL_CHAIN (arg))
1491 nargs++;
1492 vargs.create (nargs);
1493 if (this_adjusting)
1494 vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1495 virtual_offset));
1496 else
1497 vargs.quick_push (a);
1498 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1499 vargs.quick_push (arg);
1500 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1501 vargs.release ();
1502 gimple_call_set_from_thunk (call, true);
1503 if (restmp)
1504 gimple_call_set_lhs (call, restmp);
1505 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1506
1507 if (restmp && !this_adjusting)
1508 {
1509 tree true_label = NULL_TREE;
1510
1511 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1512 {
1513 gimple stmt;
1514 /* If the return type is a pointer, we need to
1515 protect against NULL. We know there will be an
1516 adjustment, because that's why we're emitting a
1517 thunk. */
1518 then_bb = create_basic_block (NULL, (void *) 0, bb);
1519 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1520 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1521 remove_edge (single_succ_edge (bb));
1522 true_label = gimple_block_label (then_bb);
1523 stmt = gimple_build_cond (NE_EXPR, restmp,
1524 build_zero_cst (TREE_TYPE (restmp)),
1525 NULL_TREE, NULL_TREE);
1526 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1527 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1528 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1529 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1530 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1531 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1532 bsi = gsi_last_bb (then_bb);
1533 }
1534
1535 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1536 fixed_offset, virtual_offset);
1537 if (true_label)
1538 {
1539 gimple stmt;
1540 bsi = gsi_last_bb (else_bb);
1541 stmt = gimple_build_assign (restmp,
1542 build_zero_cst (TREE_TYPE (restmp)));
1543 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1544 bsi = gsi_last_bb (return_bb);
1545 }
1546 }
1547 else
1548 gimple_call_set_tail (call, true);
1549
1550 /* Build return value. */
1551 ret = gimple_build_return (restmp);
1552 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1553
1554 delete_unreachable_blocks ();
1555 update_ssa (TODO_update_ssa);
1556
1557 /* Since we want to emit the thunk, we explicitly mark its name as
1558 referenced. */
1559 node->thunk.thunk_p = false;
1560 cgraph_node_remove_callees (node);
1561 cgraph_add_new_function (thunk_fndecl, true);
1562 bitmap_obstack_release (NULL);
1563 }
1564 current_function_decl = NULL;
1565 set_cfun (NULL);
1566 }
1567
1568
1569
1570 /* Assemble thunks and aliases associated to NODE. */
1571
1572 static void
1573 assemble_thunks_and_aliases (struct cgraph_node *node)
1574 {
1575 struct cgraph_edge *e;
1576 int i;
1577 struct ipa_ref *ref;
1578
1579 for (e = node->callers; e;)
1580 if (e->caller->thunk.thunk_p)
1581 {
1582 struct cgraph_node *thunk = e->caller;
1583
1584 e = e->next_caller;
1585 assemble_thunks_and_aliases (thunk);
1586 assemble_thunk (thunk);
1587 }
1588 else
1589 e = e->next_caller;
1590 for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
1591 i, ref); i++)
1592 if (ref->use == IPA_REF_ALIAS)
1593 {
1594 struct cgraph_node *alias = ipa_ref_referring_node (ref);
1595 bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);
1596
1597 /* Force assemble_alias to really output the alias this time instead
1598 of buffering it in same alias pairs. */
1599 TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
1600 do_assemble_alias (alias->symbol.decl,
1601 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1602 assemble_thunks_and_aliases (alias);
1603 TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
1604 }
1605 }
1606
1607 /* Expand function specified by NODE. */
1608
1609 static void
1610 expand_function (struct cgraph_node *node)
1611 {
1612 tree decl = node->symbol.decl;
1613 location_t saved_loc;
1614
1615 /* We ought to not compile any inline clones. */
1616 gcc_assert (!node->global.inlined_to);
1617
1618 announce_function (decl);
1619 node->process = 0;
1620 gcc_assert (node->lowered);
1621
1622 /* Generate RTL for the body of DECL. */
1623
1624 timevar_push (TV_REST_OF_COMPILATION);
1625
1626 gcc_assert (cgraph_global_info_ready);
1627
1628 /* Initialize the default bitmap obstack. */
1629 bitmap_obstack_initialize (NULL);
1630
1631 /* Initialize the RTL code for the function. */
1632 current_function_decl = decl;
1633 saved_loc = input_location;
1634 input_location = DECL_SOURCE_LOCATION (decl);
1635 init_function_start (decl);
1636
1637 gimple_register_cfg_hooks ();
1638
1639 bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
1640
1641 execute_all_ipa_transforms ();
1642
1643 /* Perform all tree transforms and optimizations. */
1644
1645 /* Signal the start of passes. */
1646 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
1647
1648 execute_pass_list (all_passes);
1649
1650 /* Signal the end of passes. */
1651 invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
1652
1653 bitmap_obstack_release (&reg_obstack);
1654
1655 /* Release the default bitmap obstack. */
1656 bitmap_obstack_release (NULL);
1657
1658 /* If requested, warn about function definitions where the function will
1659 return a value (usually of some struct or union type) which itself will
1660 take up a lot of stack space. */
1661 if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
1662 {
1663 tree ret_type = TREE_TYPE (TREE_TYPE (decl));
1664
1665 if (ret_type && TYPE_SIZE_UNIT (ret_type)
1666 && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
1667 && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
1668 larger_than_size))
1669 {
1670 unsigned int size_as_int
1671 = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
1672
1673 if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
1674 warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
1675 decl, size_as_int);
1676 else
1677 warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
1678 decl, larger_than_size);
1679 }
1680 }
1681
1682 gimple_set_body (decl, NULL);
1683 if (DECL_STRUCT_FUNCTION (decl) == 0
1684 && !cgraph_get_node (decl)->origin)
1685 {
1686 /* Stop pointing to the local nodes about to be freed.
1687 But DECL_INITIAL must remain nonzero so we know this
1688 was an actual function definition.
1689 For a nested function, this is done in c_pop_function_context.
1690 If rest_of_compilation set this to 0, leave it 0. */
1691 if (DECL_INITIAL (decl) != 0)
1692 DECL_INITIAL (decl) = error_mark_node;
1693 }
1694
1695 input_location = saved_loc;
1696
1697 ggc_collect ();
1698 timevar_pop (TV_REST_OF_COMPILATION);
1699
 1700   /* Make sure that the back end didn't give up on compiling. */
1701 gcc_assert (TREE_ASM_WRITTEN (decl));
1702 set_cfun (NULL);
1703 current_function_decl = NULL;
1704
 1705   /* It would make a lot more sense to output thunks before the function body to get more
 1706      forward and fewer backward jumps. This however would require solving the problem
 1707      with comdats. See PR48668. Also aliases must come after the function itself to
 1708      make one-pass assemblers, like the one on AIX, happy. See PR 50689.
 1709      FIXME: Perhaps thunks should be moved before the function iff they are not in comdat
 1710      groups. */
1711 assemble_thunks_and_aliases (node);
1712 cgraph_release_function_body (node);
1713 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1714 points to the dead function body. */
1715 cgraph_node_remove_callees (node);
1716 }
1717
1718
1719 /* Expand all functions that must be output.
1720
 1721    Attempt to topologically sort the nodes so a function is output when
1722 all called functions are already assembled to allow data to be
1723 propagated across the callgraph. Use a stack to get smaller distance
1724 between a function and its callees (later we may choose to use a more
1725 sophisticated algorithm for function reordering; we will likely want
1726 to use subsections to make the output functions appear in top-down
1727 order). */
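/* Illustrative example (not from the sources): for a callgraph

       main -> compute -> helper

   the intent is that walking the ORDER array from its last element towards
   the first expands helper before compute and compute before main, so a
   caller is emitted only after its callees have been assembled.  The function
   names are made up for the example.  */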
1728
1729 static void
1730 expand_all_functions (void)
1731 {
1732 struct cgraph_node *node;
1733 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1734 int order_pos, new_order_pos = 0;
1735 int i;
1736
1737 order_pos = ipa_reverse_postorder (order);
1738 gcc_assert (order_pos == cgraph_n_nodes);
1739
1740 /* The garbage collector may remove inline clones we eliminate during
1741 optimization, so we must be sure not to reference them. */
1742 for (i = 0; i < order_pos; i++)
1743 if (order[i]->process)
1744 order[new_order_pos++] = order[i];
1745
1746 for (i = new_order_pos - 1; i >= 0; i--)
1747 {
1748 node = order[i];
1749 if (node->process)
1750 {
1751 node->process = 0;
1752 expand_function (node);
1753 }
1754 }
1755 cgraph_process_new_functions ();
1756
1757 free (order);
1758
1759 }
1760
1761 /* This is used to sort the node types by the cgraph order number. */
1762
1763 enum cgraph_order_sort_kind
1764 {
1765 ORDER_UNDEFINED = 0,
1766 ORDER_FUNCTION,
1767 ORDER_VAR,
1768 ORDER_ASM
1769 };
1770
1771 struct cgraph_order_sort
1772 {
1773 enum cgraph_order_sort_kind kind;
1774 union
1775 {
1776 struct cgraph_node *f;
1777 struct varpool_node *v;
1778 struct asm_node *a;
1779 } u;
1780 };
1781
1782 /* Output all functions, variables, and asm statements in the order given
1783 by their order fields, which is the order in which they appeared in the
1784 source file. This implements -fno-toplevel-reorder. In this mode we
1785 may output functions and variables which don't really need to be
1786 output. */
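/* Illustration (user-level code, not part of GCC): with -fno-toplevel-reorder
   the three toplevel entities below are emitted in exactly this source order,
   the asm statement first, then the variable, then the function:

       asm ("# marker");
       static int counter;
       int bump (void) { return ++counter; }

   The identifiers are made up for the example.  */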
1787
1788 static void
1789 output_in_order (void)
1790 {
1791 int max;
1792 struct cgraph_order_sort *nodes;
1793 int i;
1794 struct cgraph_node *pf;
1795 struct varpool_node *pv;
1796 struct asm_node *pa;
1797
1798 max = symtab_order;
1799 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1800
1801 FOR_EACH_DEFINED_FUNCTION (pf)
1802 {
1803 if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1804 {
1805 i = pf->symbol.order;
1806 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1807 nodes[i].kind = ORDER_FUNCTION;
1808 nodes[i].u.f = pf;
1809 }
1810 }
1811
1812 FOR_EACH_DEFINED_VARIABLE (pv)
1813 if (!DECL_EXTERNAL (pv->symbol.decl))
1814 {
1815 i = pv->symbol.order;
1816 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1817 nodes[i].kind = ORDER_VAR;
1818 nodes[i].u.v = pv;
1819 }
1820
1821 for (pa = asm_nodes; pa; pa = pa->next)
1822 {
1823 i = pa->order;
1824 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1825 nodes[i].kind = ORDER_ASM;
1826 nodes[i].u.a = pa;
1827 }
1828
1829 /* In toplevel reorder mode we output all statics; mark them as needed. */
1830
1831 for (i = 0; i < max; ++i)
1832 if (nodes[i].kind == ORDER_VAR)
1833 varpool_finalize_named_section_flags (nodes[i].u.v);
1834
1835 for (i = 0; i < max; ++i)
1836 {
1837 switch (nodes[i].kind)
1838 {
1839 case ORDER_FUNCTION:
1840 nodes[i].u.f->process = 0;
1841 expand_function (nodes[i].u.f);
1842 break;
1843
1844 case ORDER_VAR:
1845 varpool_assemble_decl (nodes[i].u.v);
1846 break;
1847
1848 case ORDER_ASM:
1849 assemble_asm (nodes[i].u.a->asm_str);
1850 break;
1851
1852 case ORDER_UNDEFINED:
1853 break;
1854
1855 default:
1856 gcc_unreachable ();
1857 }
1858 }
1859
1860 asm_nodes = NULL;
1861 free (nodes);
1862 }
1863
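/* Descriptive summary of the function below: run the inter-procedural passes,
   that is the small IPA passes at compile time, the analysis/summary stage of
   the regular IPA and LTO streaming passes, and, unless we are only producing
   LTO bytecode, the regular IPA passes themselves.  */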
1864 static void
1865 ipa_passes (void)
1866 {
1867 set_cfun (NULL);
1868 current_function_decl = NULL;
1869 gimple_register_cfg_hooks ();
1870 bitmap_obstack_initialize (NULL);
1871
1872 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1873
1874 if (!in_lto_p)
1875 {
1876 execute_ipa_pass_list (all_small_ipa_passes);
1877 if (seen_error ())
1878 return;
1879 }
1880
1881 /* We never run removal of unreachable nodes after the early passes. This
1882 is because the TODOs are run before the subpasses. It is important to
1883 remove the unreachable functions to save work at the IPA level and to
1884 get the LTO symbol tables right. */
1885 symtab_remove_unreachable_nodes (true, cgraph_dump_file);
1886
1887 /* If pass_all_early_optimizations was not scheduled, the state of
1888 the cgraph will not be properly updated. Update it now. */
1889 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1890 cgraph_state = CGRAPH_STATE_IPA_SSA;
1891
1892 if (!in_lto_p)
1893 {
1894 /* Generate coverage variables and constructors. */
1895 coverage_finish ();
1896
1897 /* Process new functions added. */
1898 set_cfun (NULL);
1899 current_function_decl = NULL;
1900 cgraph_process_new_functions ();
1901
1902 execute_ipa_summary_passes
1903 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1904 }
1905
1906 /* Some targets need to handle LTO assembler output specially. */
1907 if (flag_generate_lto)
1908 targetm.asm_out.lto_start ();
1909
1910 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1911
1912 if (!in_lto_p)
1913 ipa_write_summaries ();
1914
1915 if (flag_generate_lto)
1916 targetm.asm_out.lto_end ();
1917
1918 if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
1919 execute_ipa_pass_list (all_regular_ipa_passes);
1920 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1921
1922 bitmap_obstack_release (NULL);
1923 }
1924
1925
1926 /* Return the identifier of the target of DECL's "alias" attribute. */
1927
1928 static tree
1929 get_alias_symbol (tree decl)
1930 {
1931 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1932 return get_identifier (TREE_STRING_POINTER
1933 (TREE_VALUE (TREE_VALUE (alias))));
1934 }
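/* Illustration (user-level code, not part of GCC): for declarations such as

       void impl (void) { }
       void entry (void) __attribute__ ((alias ("impl")));

   the attribute's string argument is "impl", and get_alias_symbol returns the
   corresponding identifier.  The names are made up for the example.  */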
1935
1936
1937 /* Weakrefs may be associated with external decls and thus are not output
1938 at expansion time. Emit all the necessary aliases here. */
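/* Illustration (user-level code, not part of GCC): a weakref declaration like

       static int impl (void) __attribute__ ((weakref ("real_impl")));

   makes impl a weak alias of real_impl without defining it in this unit; the
   loops below emit the alias directive when nothing else has caused the decl
   to be written out.  The names are made up for the example.  */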
1939
1940 static void
1941 output_weakrefs (void)
1942 {
1943 struct cgraph_node *node;
1944 struct varpool_node *vnode;
1945 FOR_EACH_FUNCTION (node)
1946 if (node->alias && DECL_EXTERNAL (node->symbol.decl)
1947 && !TREE_ASM_WRITTEN (node->symbol.decl)
1948 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
1949 do_assemble_alias (node->symbol.decl,
1950 node->thunk.alias && DECL_P (node->thunk.alias) ? DECL_ASSEMBLER_NAME (node->thunk.alias)
1951 : get_alias_symbol (node->symbol.decl));
1952 FOR_EACH_VARIABLE (vnode)
1953 if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
1954 && !TREE_ASM_WRITTEN (vnode->symbol.decl)
1955 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
1956 do_assemble_alias (vnode->symbol.decl,
1957 vnode->alias_of && DECL_P (vnode->alias_of) ? DECL_ASSEMBLER_NAME (vnode->alias_of)
1958 : get_alias_symbol (vnode->symbol.decl));
1959 }
1960
1961 /* Initialize callgraph dump file. */
1962
1963 void
1964 init_cgraph (void)
1965 {
1966 if (!cgraph_dump_file)
1967 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1968 }
1969
1970
1971 /* Perform simple optimizations based on the callgraph. */
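/* Descriptive overview of compile below: run the IPA passes, remove nodes
   that became unreachable, materialize all inline clones, run the late IPA
   passes, expand the remaining functions and variables (in source order when
   -fno-toplevel-reorder is given), and finally emit any pending weakrefs.  */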
1972
1973 void
1974 compile (void)
1975 {
1976 if (seen_error ())
1977 return;
1978
1979 #ifdef ENABLE_CHECKING
1980 verify_symtab ();
1981 #endif
1982
1983 timevar_push (TV_CGRAPHOPT);
1984 if (pre_ipa_mem_report)
1985 {
1986 fprintf (stderr, "Memory consumption before IPA\n");
1987 dump_memory_report (false);
1988 }
1989 if (!quiet_flag)
1990 fprintf (stderr, "Performing interprocedural optimizations\n");
1991 cgraph_state = CGRAPH_STATE_IPA;
1992
1993 /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE. */
1994 if (flag_lto)
1995 lto_streamer_hooks_init ();
1996
1997 /* Don't run the IPA passes if there was any error or sorry messages. */
1998 if (!seen_error ())
1999 ipa_passes ();
2000
2001 /* Do nothing else if any IPA pass found errors or if we are just streaming LTO. */
2002 if (seen_error ()
2003 || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2004 {
2005 timevar_pop (TV_CGRAPHOPT);
2006 return;
2007 }
2008
2009 /* This pass removes the bodies of extern inline functions we never inlined.
2010 Doing it this late lets the other IPA passes see what is really going on. */
2011 symtab_remove_unreachable_nodes (false, dump_file);
2012 cgraph_global_info_ready = true;
2013 if (cgraph_dump_file)
2014 {
2015 fprintf (cgraph_dump_file, "Optimized ");
2016 dump_symtab (cgraph_dump_file);
2017 }
2018 if (post_ipa_mem_report)
2019 {
2020 fprintf (stderr, "Memory consumption after IPA\n");
2021 dump_memory_report (false);
2022 }
2023 timevar_pop (TV_CGRAPHOPT);
2024
2025 /* Output everything. */
2026 (*debug_hooks->assembly_start) ();
2027 if (!quiet_flag)
2028 fprintf (stderr, "Assembling functions:\n");
2029 #ifdef ENABLE_CHECKING
2030 verify_symtab ();
2031 #endif
2032
2033 cgraph_materialize_all_clones ();
2034 bitmap_obstack_initialize (NULL);
2035 execute_ipa_pass_list (all_late_ipa_passes);
2036 symtab_remove_unreachable_nodes (true, dump_file);
2037 #ifdef ENABLE_CHECKING
2038 verify_symtab ();
2039 #endif
2040 bitmap_obstack_release (NULL);
2041 mark_functions_to_output ();
2042
2043 cgraph_state = CGRAPH_STATE_EXPANSION;
2044 if (!flag_toplevel_reorder)
2045 output_in_order ();
2046 else
2047 {
2048 output_asm_statements ();
2049
2050 expand_all_functions ();
2051 varpool_output_variables ();
2052 }
2053
2054 cgraph_process_new_functions ();
2055 cgraph_state = CGRAPH_STATE_FINISHED;
2056 output_weakrefs ();
2057
2058 if (cgraph_dump_file)
2059 {
2060 fprintf (cgraph_dump_file, "\nFinal ");
2061 dump_symtab (cgraph_dump_file);
2062 }
2063 #ifdef ENABLE_CHECKING
2064 verify_symtab ();
2065 /* Double check that all inline clones are gone and that all
2066 function bodies have been released from memory. */
2067 if (!seen_error ())
2068 {
2069 struct cgraph_node *node;
2070 bool error_found = false;
2071
2072 FOR_EACH_DEFINED_FUNCTION (node)
2073 if (node->global.inlined_to
2074 || gimple_has_body_p (node->symbol.decl))
2075 {
2076 error_found = true;
2077 dump_cgraph_node (stderr, node);
2078 }
2079 if (error_found)
2080 internal_error ("nodes with unreleased memory found");
2081 }
2082 #endif
2083 }
2084
2085
2086 /* Analyze the whole compilation unit once it is parsed completely. */
2087
2088 void
2089 finalize_compilation_unit (void)
2090 {
2091 timevar_push (TV_CGRAPH);
2092
2093 /* If we're here there's no current function anymore. Some frontends
2094 are lazy in clearing these. */
2095 current_function_decl = NULL;
2096 set_cfun (NULL);
2097
2098 /* Do not skip analyzing the functions if there were errors; otherwise we
2099 would miss diagnostics for the functions that follow. */
2100
2101 /* Emit size functions we didn't inline. */
2102 finalize_size_functions ();
2103
2104 /* Mark alias targets necessary and emit diagnostics. */
2105 handle_alias_pairs ();
2106
2107 if (!quiet_flag)
2108 {
2109 fprintf (stderr, "\nAnalyzing compilation unit\n");
2110 fflush (stderr);
2111 }
2112
2113 if (flag_dump_passes)
2114 dump_passes ();
2115
2116 /* Gimplify and lower all functions, compute reachability and
2117 remove unreachable nodes. */
2118 cgraph_analyze_functions ();
2119
2120 /* Mark alias targets necessary and emit diagnostics. */
2121 handle_alias_pairs ();
2122
2123 /* Gimplify and lower thunks. */
2124 cgraph_analyze_functions ();
2125
2126 /* Finally drive the pass manager. */
2127 compile ();
2128
2129 timevar_pop (TV_CGRAPH);
2130 }
2131
2132
2133 #include "gt-cgraphunit.h"