cgraph.h: Update copyrights;
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011, 2012 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48 The call-graph construction and local function analysis take
49 place here. Bodies of unreachable functions are released to
50 conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We cannot deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "output.h"
115 #include "rtl.h"
116 #include "tree-flow.h"
117 #include "tree-inline.h"
118 #include "langhooks.h"
119 #include "pointer-set.h"
120 #include "toplev.h"
121 #include "flags.h"
122 #include "ggc.h"
123 #include "debug.h"
124 #include "target.h"
125 #include "cgraph.h"
126 #include "diagnostic.h"
127 #include "tree-pretty-print.h"
128 #include "gimple-pretty-print.h"
129 #include "timevar.h"
130 #include "params.h"
131 #include "fibheap.h"
132 #include "intl.h"
133 #include "function.h"
134 #include "ipa-prop.h"
135 #include "gimple.h"
136 #include "tree-iterator.h"
137 #include "tree-pass.h"
138 #include "tree-dump.h"
139 #include "output.h"
140 #include "coverage.h"
141 #include "plugin.h"
142 #include "ipa-inline.h"
143 #include "ipa-utils.h"
144 #include "lto-streamer.h"
145 #include "except.h"
146 #include "regset.h" /* FIXME: For reg_obstack. */
147
148 static void cgraph_expand_all_functions (void);
149 static void cgraph_mark_functions_to_output (void);
150 static void cgraph_expand_function (struct cgraph_node *);
151 static void cgraph_output_pending_asms (void);
152 static void tree_rest_of_compilation (struct cgraph_node *);
153
154 FILE *cgraph_dump_file;
155
156 /* Used for vtable lookup in thunk adjusting. */
157 static GTY (()) tree vtable_entry_type;
158
159 /* Determine if function DECL is needed. That is, visible to something
160 either outside this translation unit, something magic in the system
161 configury. */
162
163 bool
164 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
165 {
166 /* If the user told us it is used, then it must be so. */
167 if (node->symbol.externally_visible)
168 return true;
169
170 /* ??? If the assembler name is set by hand, it is possible to assemble
171 the name later after finalizing the function and the fact is noticed
172 in assemble_name then. This is arguably a bug. */
173 if (DECL_ASSEMBLER_NAME_SET_P (decl)
174 && (!node->thunk.thunk_p && !node->same_body_alias)
175 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
176 return true;
177
178 /* With -fkeep-inline-functions we are keeping all inline functions except
179 for extern inline ones. */
180 if (flag_keep_inline_functions
181 && DECL_DECLARED_INLINE_P (decl)
182 && !DECL_EXTERNAL (decl)
183 && !DECL_DISREGARD_INLINE_LIMITS (decl))
184 return true;
185
186 /* If we decided it was needed before, but at the time we didn't have
187 the body of the function available, then it's still needed. We have
188 to go back and re-check its dependencies now. */
189 if (node->needed)
190 return true;
191
192 /* Externally visible functions must be output. The exception is
193 COMDAT functions that must be output only when they are needed.
194
195 When not optimizing, also output the static functions. (see
196 PR24561), but don't do so for always_inline functions, functions
197 declared inline and nested functions. These were optimized out
198 in the original implementation and it is unclear whether we want
199 to change the behavior here. */
/* NOTE: under -fwhole-program or -flto even TREE_PUBLIC functions are not
   automatically needed here; whole-program analysis decides instead.  */
200 if (((TREE_PUBLIC (decl)
201 || (!optimize
202 && !node->same_body_alias
203 && !DECL_DISREGARD_INLINE_LIMITS (decl)
204 && !DECL_DECLARED_INLINE_P (decl)
205 && !(DECL_CONTEXT (decl)
206 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
207 && !flag_whole_program
208 && !flag_lto)
209 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
210 return true;
211
212 return false;
213 }
214
215 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
216 functions into callgraph in a way so they look like ordinary reachable
217 functions inserted into callgraph already at construction time. */
218
219 bool
220 cgraph_process_new_functions (void)
221 {
/* Returns true iff some function was finalized at construction time,
   i.e. new output may be required.  */
222 bool output = false;
223 tree fndecl;
224 struct cgraph_node *node;
225
226 varpool_analyze_pending_decls ();
227 /* Note that this queue may grow as it is being processed, as the new
228 functions may generate new ones. */
229 while (cgraph_new_nodes)
230 {
231 node = cgraph_new_nodes;
232 fndecl = node->symbol.decl;
233 cgraph_new_nodes = cgraph_new_nodes->next_needed;
/* What needs to be done for NODE depends on how far compilation
   has progressed overall.  */
234 switch (cgraph_state)
235 {
236 case CGRAPH_STATE_CONSTRUCTION:
237 /* At construction time we just need to finalize function and move
238 it into reachable functions list. */
239
240 node->next_needed = NULL;
241 cgraph_finalize_function (fndecl, false);
242 cgraph_mark_reachable_node (node);
243 output = true;
244 cgraph_call_function_insertion_hooks (node);
245 break;
246
247 case CGRAPH_STATE_IPA:
248 case CGRAPH_STATE_IPA_SSA:
249 /* When IPA optimization already started, do all essential
250 transformations that have already been performed on the whole
251 cgraph but not on this function. */
252
253 gimple_register_cfg_hooks ();
254 if (!node->analyzed)
255 cgraph_analyze_function (node);
256 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
257 current_function_decl = fndecl;
258 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
259 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
260 /* When not optimizing, be sure we run early local passes anyway
261 to expand OMP. */
262 || !optimize)
263 execute_pass_list (pass_early_local_passes.pass.sub);
264 else
265 compute_inline_parameters (node, true);
266 free_dominance_info (CDI_POST_DOMINATORS);
267 free_dominance_info (CDI_DOMINATORS);
268 pop_cfun ();
269 current_function_decl = NULL;
270 cgraph_call_function_insertion_hooks (node);
271 break;
272
273 case CGRAPH_STATE_EXPANSION:
274 /* Functions created during expansion shall be compiled
275 directly. */
276 node->process = 0;
277 cgraph_call_function_insertion_hooks (node);
278 cgraph_expand_function (node);
279 break;
280
281 default:
282 gcc_unreachable ();
283 break;
284 }
/* Processing the function may have created new pending variables.  */
285 varpool_analyze_pending_decls ();
286 }
287 return output;
288 }
289
290 /* As an GCC extension we allow redefinition of the function. The
291 semantics when both copies of bodies differ is not well defined.
292 We replace the old body with new body so in unit at a time mode
293 we always use new body, while in normal mode we may end up with
294 old body inlined into some functions and new body expanded and
295 inlined in others.
296
297 ??? It may make more sense to use one body for inlining and other
298 body for expanding the function but this is difficult to do. */
299
300 static void
301 cgraph_reset_node (struct cgraph_node *node)
302 {
303 /* If node->process is set, then we have already begun whole-unit analysis.
304 This is *not* testing for whether we've already emitted the function.
305 That case can be sort-of legitimately seen with real function redefinition
306 errors. I would argue that the front end should never present us with
307 such a case, but don't enforce that for now. */
308 gcc_assert (!node->process);
309
310 /* Reset our data structures so we can analyze the function again. */
311 memset (&node->local, 0, sizeof (node->local));
312 memset (&node->global, 0, sizeof (node->global));
313 memset (&node->rtl, 0, sizeof (node->rtl));
314 node->analyzed = false;
315 node->local.finalized = false;
316
/* Drop outgoing call edges; they are rebuilt when the replacement body
   is analyzed.  */
317 cgraph_node_remove_callees (node);
318 }
319
320 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
321 logic in effect. If NESTED is true, then our caller cannot stand to have
322 the garbage collector run at the moment. We would need to either create
323 a new GC context, or just not compile right now. */
324
325 void
326 cgraph_finalize_function (tree decl, bool nested)
327 {
328 struct cgraph_node *node = cgraph_get_create_node (decl);
329
/* A second finalization of the same decl means the function was redefined
   (GCC extern inline extension); reset the node and remember the fact.  */
330 if (node->local.finalized)
331 {
332 cgraph_reset_node (node);
333 node->local.redefined_extern_inline = true;
334 }
335
336 notice_global_symbol (decl);
337 node->local.finalized = true;
338 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
339
340 if (cgraph_decide_is_function_needed (node, decl))
341 cgraph_mark_needed_node (node);
342
343 /* Since we reclaim unreachable nodes at the end of every language
344 level unit, we need to be conservative about possible entry points
345 there. */
346 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
347 || DECL_STATIC_CONSTRUCTOR (decl)
348 || DECL_STATIC_DESTRUCTOR (decl)
349 /* COMDAT virtual functions may be referenced by vtable from
350 other compilation unit. Still we want to devirtualize calls
351 to those so we need to analyze them.
352 FIXME: We should introduce may edges for this purpose and update
353 their handling in unreachable function removal and inliner too. */
354 || (DECL_VIRTUAL_P (decl)
355 && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
356 cgraph_mark_reachable_node (node);
357
358 /* If we've not yet emitted decl, tell the debug info about it. */
359 if (!TREE_ASM_WRITTEN (decl))
360 (*debug_hooks->deferred_inline_function) (decl);
361
362 /* Possibly warn about unused parameters. */
363 if (warn_unused_parameter)
364 do_warn_unused_parameter (decl);
365
/* Per the header comment: when NESTED, the caller cannot tolerate a GC
   run at this point, so collect only for non-nested finalization.  */
366 if (!nested)
367 ggc_collect ();
368 }
369
370 /* Add the function FNDECL to the call graph.
371 Unlike cgraph_finalize_function, this function is intended to be used
372 by middle end and allows insertion of new function at arbitrary point
373 of compilation. The function can be either in high, low or SSA form
374 GIMPLE.
375
376 The function is assumed to be reachable and have address taken (so no
377 API breaking optimizations are performed on it).
378
379 Main work done by this function is to enqueue the function for later
380 processing to avoid need the passes to be re-entrant. */
381
382 void
383 cgraph_add_new_function (tree fndecl, bool lowered)
384 {
385 struct cgraph_node *node;
/* The amount of work needed depends on how far global compilation
   has already progressed.  */
386 switch (cgraph_state)
387 {
388 case CGRAPH_STATE_CONSTRUCTION:
389 /* Just enqueue function to be processed at nearest occurrence. */
390 node = cgraph_create_node (fndecl);
391 node->next_needed = cgraph_new_nodes;
392 if (lowered)
393 node->lowered = true;
394 cgraph_new_nodes = node;
395 break;
396
397 case CGRAPH_STATE_IPA:
398 case CGRAPH_STATE_IPA_SSA:
399 case CGRAPH_STATE_EXPANSION:
400 /* Bring the function into finalized state and enqueue for later
401 analyzing and compilation. */
402 node = cgraph_get_create_node (fndecl);
403 node->local.local = false;
404 node->local.finalized = true;
405 node->reachable = node->needed = true;
406 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
407 {
/* During expansion we must lower the body immediately, since nothing
   will lower it for us later.  */
408 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
409 current_function_decl = fndecl;
410 gimple_register_cfg_hooks ();
411 bitmap_obstack_initialize (NULL);
412 execute_pass_list (all_lowering_passes);
413 execute_pass_list (pass_early_local_passes.pass.sub);
414 bitmap_obstack_release (NULL);
415 pop_cfun ();
416 current_function_decl = NULL;
417
418 lowered = true;
419 }
420 if (lowered)
421 node->lowered = true;
422 node->next_needed = cgraph_new_nodes;
423 cgraph_new_nodes = node;
424 break;
425
426 case CGRAPH_STATE_FINISHED:
427 /* At the very end of compilation we have to do all the work up
428 to expansion. */
429 node = cgraph_create_node (fndecl);
430 if (lowered)
431 node->lowered = true;
432 cgraph_analyze_function (node);
433 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
434 current_function_decl = fndecl;
435 gimple_register_cfg_hooks ();
436 bitmap_obstack_initialize (NULL);
437 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
438 execute_pass_list (pass_early_local_passes.pass.sub);
439 bitmap_obstack_release (NULL);
440 tree_rest_of_compilation (node);
441 pop_cfun ();
442 current_function_decl = NULL;
443 break;
444
445 default:
446 gcc_unreachable ();
447 }
448
449 /* Set a personality if required and we already passed EH lowering. */
450 if (lowered
451 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
452 == eh_personality_lang))
453 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
454 }
455
456 /* C99 extern inline keywords allow changing of declaration after function
457 has been finalized. We need to re-decide if we want to mark the function as
458 needed then. */
459
460 void
461 cgraph_mark_if_needed (tree decl)
462 {
463 struct cgraph_node *node = cgraph_get_node (decl);
464 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
465 cgraph_mark_needed_node (node);
466 }
467
468 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
469 static bool
470 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
471 {
472 node = cgraph_function_or_thunk_node (node, NULL);
473 node2 = cgraph_function_or_thunk_node (node2, NULL);
474 while (node != node2 && node2)
475 node2 = node2->clone_of;
476 return node2 != NULL;
477 }
478
479 /* Verify edge E count and frequency. */
480
481 static bool
482 verify_edge_count_and_frequency (struct cgraph_edge *e)
483 {
/* Returns true when a problem was reported; diagnostics go through
   error ().  */
484 bool error_found = false;
485 if (e->count < 0)
486 {
487 error ("caller edge count is negative");
488 error_found = true;
489 }
490 if (e->frequency < 0)
491 {
492 error ("caller edge frequency is negative");
493 error_found = true;
494 }
495 if (e->frequency > CGRAPH_FREQ_MAX)
496 {
497 error ("caller edge frequency is too large");
498 error_found = true;
499 }
/* Cross-check the cached edge frequency against the frequency recomputed
   from the basic block containing the call statement; only meaningful
   when the caller still has a gimple body and is not an inline clone.  */
500 if (gimple_has_body_p (e->caller->symbol.decl)
501 && !e->caller->global.inlined_to
502 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
503 Remove this once edges are actually removed from the function at that time. */
504 && (e->frequency
505 || (inline_edge_summary_vec
506 && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
507 <= (unsigned) e->uid)
508 || !inline_edge_summary (e)->predicate)))
509 && (e->frequency
510 != compute_call_stmt_bb_frequency (e->caller->symbol.decl,
511 gimple_bb (e->call_stmt))))
512 {
513 error ("caller edge frequency %i does not match BB frequency %i",
514 e->frequency,
515 compute_call_stmt_bb_frequency (e->caller->symbol.decl,
516 gimple_bb (e->call_stmt)));
517 error_found = true;
518 }
519 return error_found;
520 }
521
522 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
523 static void
524 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
525 {
526 /* debug_gimple_stmt needs correct cfun */
527 if (cfun != this_cfun)
528 set_cfun (this_cfun);
529 debug_gimple_stmt (stmt);
530 }
531
532 /* Verify that call graph edge E corresponds to DECL from the associated
533 statement. Return true if the verification should fail. */
534
535 static bool
536 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
537 {
/* Note the inverted sense: returning true means a mismatch was found,
   i.e. verification should FAIL.  */
538 struct cgraph_node *node;
539
540 if (!decl || e->callee->global.inlined_to)
541 return false;
542 node = cgraph_get_node (decl);
543
544 /* We do not know if a node from a different partition is an alias or what it
545 aliases and therefore cannot do the former_clone_of check reliably. */
546 if (!node || node->symbol.in_other_partition)
547 return false;
548 node = cgraph_function_or_thunk_node (node, NULL);
549
550 if ((e->callee->former_clone_of != node->symbol.decl
551 && (!node->same_body_alias
552 || e->callee->former_clone_of != node->thunk.alias))
553 /* IPA-CP sometimes redirect edge to clone and then back to the former
554 function. This ping-pong has to go, eventually. */
555 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
556 && !clone_of_p (node, e->callee)
557 /* If decl is a same body alias of some other decl, allow e->callee to be
558 a clone of a clone of that other decl too. */
559 && (!node->same_body_alias
560 || !clone_of_p (cgraph_get_node (node->thunk.alias), e->callee)))
561 return true;
562 else
563 return false;
564 }
565
566 /* Verify cgraph nodes of given cgraph node. */
567 DEBUG_FUNCTION void
568 verify_cgraph_node (struct cgraph_node *node)
569 {
570 struct cgraph_edge *e;
571 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->symbol.decl);
572 basic_block this_block;
573 gimple_stmt_iterator gsi;
574 bool error_found = false;
575
576 if (seen_error ())
577 return;
578
579 timevar_push (TV_CGRAPH_VERIFY);
/* e->aux is used as scratch space further below; it must be clear on
   entry for every outgoing edge.  */
580 for (e = node->callees; e; e = e->next_callee)
581 if (e->aux)
582 {
583 error ("aux field set for edge %s->%s",
584 identifier_to_locale (cgraph_node_name (e->caller)),
585 identifier_to_locale (cgraph_node_name (e->callee)));
586 error_found = true;
587 }
588 if (node->count < 0)
589 {
590 error ("execution count is negative");
591 error_found = true;
592 }
593 if (node->global.inlined_to && node->symbol.externally_visible)
594 {
595 error ("externally visible inline clone");
596 error_found = true;
597 }
598 if (node->global.inlined_to && node->symbol.address_taken)
599 {
600 error ("inline clone with address taken");
601 error_found = true;
602 }
603 if (node->global.inlined_to && node->needed)
604 {
605 error ("inline clone is needed");
606 error_found = true;
607 }
608 for (e = node->indirect_calls; e; e = e->next_callee)
609 {
610 if (e->aux)
611 {
612 error ("aux field set for indirect edge from %s",
613 identifier_to_locale (cgraph_node_name (e->caller)));
614 error_found = true;
615 }
616 if (!e->indirect_unknown_callee
617 || !e->indirect_info)
618 {
619 error ("An indirect edge from %s is not marked as indirect or has "
620 "associated indirect_info, the corresponding statement is: ",
621 identifier_to_locale (cgraph_node_name (e->caller)));
622 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
623 error_found = true;
624 }
625 }
/* Check incoming edges and the inline-clone invariants: an inlined node
   must point at the outermost function it was inlined into and may have
   exactly one caller.  */
626 for (e = node->callers; e; e = e->next_caller)
627 {
628 if (verify_edge_count_and_frequency (e))
629 error_found = true;
630 if (!e->inline_failed)
631 {
632 if (node->global.inlined_to
633 != (e->caller->global.inlined_to
634 ? e->caller->global.inlined_to : e->caller))
635 {
636 error ("inlined_to pointer is wrong");
637 error_found = true;
638 }
639 if (node->callers->next_caller)
640 {
641 error ("multiple inline callers");
642 error_found = true;
643 }
644 }
645 else
646 if (node->global.inlined_to)
647 {
648 error ("inlined_to pointer set for noninline callers");
649 error_found = true;
650 }
651 }
652 for (e = node->indirect_calls; e; e = e->next_callee)
653 if (verify_edge_count_and_frequency (e))
654 error_found = true;
655 if (!node->callers && node->global.inlined_to)
656 {
657 error ("inlined_to pointer is set but no predecessors found");
658 error_found = true;
659 }
660 if (node->global.inlined_to == node)
661 {
662 error ("inlined_to pointer refers to itself");
663 error_found = true;
664 }
665
666 if (!cgraph_get_node (node->symbol.decl))
667 {
668 error ("node not found in cgraph_hash");
669 error_found = true;
670 }
671
/* Verify the doubly linked clone tree in both directions.  */
672 if (node->clone_of)
673 {
674 struct cgraph_node *n;
675 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
676 if (n == node)
677 break;
678 if (!n)
679 {
680 error ("node has wrong clone_of");
681 error_found = true;
682 }
683 }
684 if (node->clones)
685 {
686 struct cgraph_node *n;
687 for (n = node->clones; n; n = n->next_sibling_clone)
688 if (n->clone_of != node)
689 break;
690 if (n)
691 {
692 error ("node has wrong clone list");
693 error_found = true;
694 }
695 }
696 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
697 {
698 error ("node is in clone list but it is not clone");
699 error_found = true;
700 }
701 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
702 {
703 error ("node has wrong prev_clone pointer");
704 error_found = true;
705 }
706 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
707 {
708 error ("double linked list of clones corrupted");
709 error_found = true;
710 }
/* same_comdat_group must form a circular list of DECL_ONE_ONLY nodes
   with at least two members.  */
711 if (node->symbol.same_comdat_group)
712 {
713 symtab_node n = node->symbol.same_comdat_group;
714
715 if (!DECL_ONE_ONLY (n->symbol.decl))
716 {
717 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
718 error_found = true;
719 }
720 if (n == (symtab_node)node)
721 {
722 error ("node is alone in a comdat group");
723 error_found = true;
724 }
725 do
726 {
727 if (!n->symbol.same_comdat_group)
728 {
729 error ("same_comdat_group is not a circular list");
730 error_found = true;
731 break;
732 }
733 n = n->symbol.same_comdat_group;
734 }
735 while (n != (symtab_node)node);
736 }
737
/* An analyzed alias must carry exactly one IPA_REF_ALIAS reference and
   no call edges.  */
738 if (node->analyzed && node->alias)
739 {
740 bool ref_found = false;
741 int i;
742 struct ipa_ref *ref;
743
744 if (node->callees)
745 {
746 error ("Alias has call edges");
747 error_found = true;
748 }
749 for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list,
750 i, ref); i++)
751 if (ref->use != IPA_REF_ALIAS)
752 {
753 error ("Alias has non-alias reference");
754 error_found = true;
755 }
756 else if (ref_found)
757 {
758 error ("Alias has more than one alias reference");
759 error_found = true;
760 }
761 else
762 ref_found = true;
763 if (!ref_found)
764 {
765 error ("Analyzed alias has no reference");
766 error_found = true;
767 }
768 }
/* A thunk has exactly one outgoing edge (to the function it adjusts
   into) and no gimple body of its own.  */
769 if (node->analyzed && node->thunk.thunk_p)
770 {
771 if (!node->callees)
772 {
773 error ("No edge out of thunk node");
774 error_found = true;
775 }
776 else if (node->callees->next_callee)
777 {
778 error ("More than one edge out of thunk node");
779 error_found = true;
780 }
781 if (gimple_has_body_p (node->symbol.decl))
782 {
783 error ("Thunk is not supposed to have body");
784 error_found = true;
785 }
786 }
/* For an ordinary analyzed function whose body is still around,
   cross-check call edges against the call statements found by walking
   the CFG; matched edges get e->aux set temporarily and cleared below.  */
787 else if (node->analyzed && gimple_has_body_p (node->symbol.decl)
788 && !TREE_ASM_WRITTEN (node->symbol.decl)
789 && (!DECL_EXTERNAL (node->symbol.decl) || node->global.inlined_to)
790 && !flag_wpa)
791 {
792 if (this_cfun->cfg)
793 {
794 /* The nodes we're interested in are never shared, so walk
795 the tree ignoring duplicates. */
796 struct pointer_set_t *visited_nodes = pointer_set_create ();
797 /* Reach the trees by walking over the CFG, and note the
798 enclosing basic-blocks in the call edges. */
799 FOR_EACH_BB_FN (this_block, this_cfun)
800 for (gsi = gsi_start_bb (this_block);
801 !gsi_end_p (gsi);
802 gsi_next (&gsi))
803 {
804 gimple stmt = gsi_stmt (gsi);
805 if (is_gimple_call (stmt))
806 {
807 struct cgraph_edge *e = cgraph_edge (node, stmt);
808 tree decl = gimple_call_fndecl (stmt);
809 if (e)
810 {
811 if (e->aux)
812 {
813 error ("shared call_stmt:");
814 cgraph_debug_gimple_stmt (this_cfun, stmt);
815 error_found = true;
816 }
817 if (!e->indirect_unknown_callee)
818 {
819 if (verify_edge_corresponds_to_fndecl (e, decl))
820 {
821 error ("edge points to wrong declaration:");
822 debug_tree (e->callee->symbol.decl);
823 fprintf (stderr," Instead of:");
824 debug_tree (decl);
825 error_found = true;
826 }
827 }
828 else if (decl)
829 {
830 error ("an indirect edge with unknown callee "
831 "corresponding to a call_stmt with "
832 "a known declaration:");
833 error_found = true;
834 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
835 }
836 e->aux = (void *)1;
837 }
838 else if (decl)
839 {
840 error ("missing callgraph edge for call stmt:");
841 cgraph_debug_gimple_stmt (this_cfun, stmt);
842 error_found = true;
843 }
844 }
845 }
846 pointer_set_destroy (visited_nodes);
847 }
848 else
849 /* No CFG available?! */
850 gcc_unreachable ();
851
/* Any edge whose aux was not set above has no matching call statement.
   Clear the scratch aux fields again either way.  */
852 for (e = node->callees; e; e = e->next_callee)
853 {
854 if (!e->aux)
855 {
856 error ("edge %s->%s has no corresponding call_stmt",
857 identifier_to_locale (cgraph_node_name (e->caller)),
858 identifier_to_locale (cgraph_node_name (e->callee)));
859 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
860 error_found = true;
861 }
862 e->aux = 0;
863 }
864 for (e = node->indirect_calls; e; e = e->next_callee)
865 {
866 if (!e->aux)
867 {
868 error ("an indirect edge from %s has no corresponding call_stmt",
869 identifier_to_locale (cgraph_node_name (e->caller)));
870 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
871 error_found = true;
872 }
873 e->aux = 0;
874 }
875 }
876 if (error_found)
877 {
878 dump_cgraph_node (stderr, node);
879 internal_error ("verify_cgraph_node failed");
880 }
881 timevar_pop (TV_CGRAPH_VERIFY);
882 }
883
884 /* Verify whole cgraph structure. */
885 DEBUG_FUNCTION void
886 verify_cgraph (void)
887 {
888 struct cgraph_node *node;
889
890 if (seen_error ())
891 return;
892
893 for (node = cgraph_nodes; node; node = node->next)
894 verify_cgraph_node (node);
895 }
896
897 /* Output all asm statements we have stored up to be output. */
898
899 static void
900 cgraph_output_pending_asms (void)
901 {
902 struct cgraph_asm_node *can;
903
904 if (seen_error ())
905 return;
906
907 for (can = cgraph_asm_nodes; can; can = can->next)
908 assemble_asm (can->asm_str);
909 cgraph_asm_nodes = NULL;
910 }
911
912 /* Analyze the function scheduled to be output. */
913 void
914 cgraph_analyze_function (struct cgraph_node *node)
915 {
/* Three cases below: NODE is an alias (link it to its target and copy
   visibility/inline flags), a thunk (create the single call edge to the
   target), or an ordinary function (gimplify and lower the body).  */
916 tree save = current_function_decl;
917 tree decl = node->symbol.decl;
918
919 if (node->alias && node->thunk.alias)
920 {
921 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
922 struct cgraph_node *n;
923
/* Detect alias cycles by following the alias chain from the target;
   reaching NODE again means the chain loops back.  */
924 for (n = tgt; n && n->alias;
925 n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
926 if (n == node)
927 {
928 error ("function %q+D part of alias cycle", node->symbol.decl);
929 node->alias = false;
930 return;
931 }
932 if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
933 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
934 if (node->same_body_alias)
935 {
936 DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (node->thunk.alias);
937 DECL_DECLARED_INLINE_P (node->symbol.decl)
938 = DECL_DECLARED_INLINE_P (node->thunk.alias);
939 DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
940 = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
941 }
942
943 /* Fixup visibility nonsense the C++ frontend produces on same body aliases. */
944 if (TREE_PUBLIC (node->symbol.decl) && node->same_body_alias)
945 {
946 DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (node->thunk.alias);
947 if (DECL_ONE_ONLY (node->thunk.alias))
948 {
949 DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (node->thunk.alias);
950 DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (node->thunk.alias);
/* Link NODE into the target's circular same_comdat_group list
   (creating a two-element cycle if the target had none).  */
951 if (DECL_ONE_ONLY (node->thunk.alias) && !node->symbol.same_comdat_group)
952 {
953 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
954 node->symbol.same_comdat_group = (symtab_node)tgt;
955 if (!tgt->symbol.same_comdat_group)
956 tgt->symbol.same_comdat_group = (symtab_node)node;
957 else
958 {
959 symtab_node n;
960 for (n = tgt->symbol.same_comdat_group;
961 n->symbol.same_comdat_group != (symtab_node)tgt;
962 n = n->symbol.same_comdat_group)
963 ;
964 n->symbol.same_comdat_group = (symtab_node)node;
965 }
966 }
967 }
968 }
969 cgraph_mark_reachable_node (cgraph_alias_aliased_node (node));
970 if (node->symbol.address_taken)
971 cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
972 if (cgraph_decide_is_function_needed (node, node->symbol.decl))
973 cgraph_mark_needed_node (node);
974 }
975 else if (node->thunk.thunk_p)
976 {
977 cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
978 NULL, 0, CGRAPH_FREQ_BASE);
979 }
980 else
981 {
982 current_function_decl = decl;
983 push_cfun (DECL_STRUCT_FUNCTION (decl));
984
985 assign_assembler_name_if_neeeded (node->symbol.decl);
986
987 /* Make sure to gimplify bodies only once. During analyzing a
988 function we lower it, which will require gimplified nested
989 functions, so we can end up here with an already gimplified
990 body. */
991 if (!gimple_body (decl))
992 gimplify_function_tree (decl);
993 dump_function (TDI_generic, decl);
994
995 /* Lower the function. */
996 if (!node->lowered)
997 {
998 if (node->nested)
999 lower_nested_functions (node->symbol.decl);
1000 gcc_assert (!node->nested);
1001
1002 gimple_register_cfg_hooks ();
1003 bitmap_obstack_initialize (NULL);
1004 execute_pass_list (all_lowering_passes);
1005 free_dominance_info (CDI_POST_DOMINATORS);
1006 free_dominance_info (CDI_DOMINATORS);
1007 compact_blocks ();
1008 bitmap_obstack_release (NULL);
1009 node->lowered = true;
1010 }
1011
1012 pop_cfun ();
1013 }
1014 node->analyzed = true;
1015
1016 current_function_decl = save;
1017 }
1018
1019 /* C++ frontend produce same body aliases all over the place, even before PCH
1020 gets streamed out. It relies on us linking the aliases with their function
1021 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
1022 first produce aliases without links, but once C++ FE is sure he won't sream
1023 PCH we build the links via this function. */
1024
1025 void
1026 cgraph_process_same_body_aliases (void)
1027 {
1028 struct cgraph_node *node;
1029 for (node = cgraph_nodes; node; node = node->next)
1030 if (node->same_body_alias
1031 && !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
1032 {
1033 struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
1034 ipa_record_reference (node, NULL, tgt, NULL, IPA_REF_ALIAS, NULL);
1035 }
1036 same_body_aliases_done = true;
1037 }
1038
1039 /* Process attributes common for vars and functions. */
1040
1041 static void
1042 process_common_attributes (tree decl)
1043 {
1044 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
1045
1046 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
1047 {
1048 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
1049 "%<weakref%> attribute should be accompanied with"
1050 " an %<alias%> attribute");
1051 DECL_WEAK (decl) = 0;
1052 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
1053 DECL_ATTRIBUTES (decl));
1054 }
1055 }
1056
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  FIRST and FIRST_VAR delimit the portion of
   the cgraph/varpool lists already processed by a previous call; only the
   newly added nodes in front of them are visited.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->symbol.decl;
      /* "used" attribute: keep the function even if it appears
	 unreferenced.  */
      if (DECL_PRESERVE_P (decl))
	cgraph_mark_needed_node (node);
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
	  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
	  && TREE_PUBLIC (node->symbol.decl))
	{
	  if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->symbol.decl))
	    warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	  else if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      /* A "weakref" on a function that already has a local definition
	 cannot be honored; warn and strip it.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->local.finalized && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  /* Same walk for variables, mirroring the function handling above.  */
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->symbol.decl;
      if (DECL_PRESERVE_P (decl))
	{
	  vnode->force_output = true;
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
	  && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
	  && TREE_PUBLIC (vnode->symbol.decl))
	{
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->symbol.decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	  else if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      /* A "weakref" on a variable that has an initializer is likewise
	 meaningless; warn and strip it.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->finalized
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}
1168
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  bitmap_obstack_initialize (NULL);
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->symbol.decl;

      /* Pop the head of the needed-nodes queue.  */
      node = cgraph_nodes_queue;
      x_cgraph_nodes_queue = (symtab_node)cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl)
	  && (!node->alias || !node->thunk.alias)
	  && !node->thunk.thunk_p)
	{
	  cgraph_reset_node (node);
	  node->local.redefined_extern_inline = true;
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Everything this node calls becomes reachable.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);
      /* Thunks wrapping this function are reachable along with it.  */
      for (edge = node->callers; edge; edge = edge->next_caller)
	if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
	  cgraph_mark_reachable_node (edge->caller);

      /* Members of a comdat group are kept or dropped as a unit, so the
	 whole group becomes reachable together.  */
      if (node->symbol.same_comdat_group)
	{
	  for (next = cgraph (node->symbol.same_comdat_group);
	       next != node;
	       next = cgraph (next->symbol.same_comdat_group))
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body.  The DECL_INITIAL() of that
	 abstract function declaration will be later needed to output debug
	 info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node;
	  origin_node = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove the nodes added in this run that turned out to be unreachable,
     releasing their bodies to conserve memory.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->symbol.decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl)
	  && (!node->alias || !node->thunk.alias)
	  && !node->thunk.thunk_p)
	cgraph_reset_node (node);

      if (!node->reachable
	  && (gimple_has_body_p (decl) || node->thunk.thunk_p
	      || (node->alias && node->thunk.alias)))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || node->thunk.thunk_p
		  || node->alias
		  || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  bitmap_obstack_release (NULL);
  /* Remember where the processed portion of the list ends so the next
     invocation only scans newly added nodes.  */
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
1315
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;
  struct cgraph_node *target_node;
  struct cgraph_node *src_node;
  struct varpool_node *target_vnode;

  /* I only advances when the current pair is left unhandled; handled pairs
     are deleted with VEC_unordered_remove, which moves the last element
     into slot I.  */
  for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
    {
      /* Function alias whose target function is defined in this unit.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && (target_node = cgraph_node_for_asm (p->target)) != NULL)
	{
	  src_node = cgraph_get_node (p->decl);
	  if (src_node && src_node->local.finalized)
	    cgraph_reset_node (src_node);
	  /* Normally EXTERNAL flag is used to mark external inlines,
	     however for aliases it seems to be allowed to use it w/o
	     any meaning.  See gcc.dg/attr-alias-3.c
	     However for weakref we insist on EXTERNAL flag being set.
	     See gcc.dg/attr-alias-5.c  */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  cgraph_create_function_alias (p->decl, target_node->symbol.decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Variable alias whose target variable is defined in this unit.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && (target_vnode = varpool_node_for_asm (p->target)) != NULL)
	{
	  /* Normally EXTERNAL flag is used to mark external inlines,
	     however for aliases it seems to be allowed to use it w/o
	     any meaning.  See gcc.dg/attr-alias-3.c
	     However for weakref we insist on EXTERNAL flag being set.
	     See gcc.dg/attr-alias-5.c  */
	  if (DECL_EXTERNAL (p->decl))
	    DECL_EXTERNAL (p->decl)
	      = lookup_attribute ("weakref",
				  DECL_ATTRIBUTES (p->decl)) != NULL;
	  varpool_create_variable_alias (p->decl, target_vnode->symbol.decl);
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      /* Weakrefs with target not defined in current unit are easy to handle; they
	 behave just as external variables except we need to note the alias flag
	 to later output the weakref pseudo op into asm file.
	 Note: the ternary below deliberately queries the OPPOSITE-kind
	 table -- the same-kind lookup already returned NULL in the branches
	 above, so this is what establishes that the target is defined
	 nowhere in this unit.  */
      else if (lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL
	       && (TREE_CODE (p->decl) == FUNCTION_DECL
		   ? (varpool_node_for_asm (p->target) == NULL)
		   : (cgraph_node_for_asm (p->target) == NULL)))
	{
	  if (TREE_CODE (p->decl) == FUNCTION_DECL)
	    cgraph_get_create_node (p->decl)->alias = true;
	  else
	    varpool_get_node (p->decl)->alias = true;
	  DECL_EXTERNAL (p->decl) = 1;
	  VEC_unordered_remove (alias_pair, alias_pairs, i);
	}
      else
	{
	  /* Unsupported combination; leave the pair in place and move on.  */
	  if (dump_file)
	    fprintf (dump_file, "Unhandled alias %s->%s\n",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		     IDENTIFIER_POINTER (p->target));

	  i++;
	}
    }
}
1390
1391
/* Figure out what functions we want to assemble, setting the PROCESS flag
   on each node that must be output.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may start out already marked.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->symbol.decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process || node->symbol.same_comdat_group);
      if (node->process)
	continue;

      /* Look for a call edge that survived inlining; its existence means
	 the function is still called directly.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || ((e || ipa_ref_has_aliases_p (&node->symbol.ref_list))
		  && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* A comdat group is output as a unit; mark the other members
	     too (thunks and aliases are emitted with their function).  */
	  if (node->symbol.same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->symbol.same_comdat_group);
		   next != node;
		   next = cgraph (next->symbol.same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->symbol.same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Verify later that no unprocessed group member still owns a
	     body that should have been reclaimed.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !node->alias
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->symbol.in_other_partition
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->symbol.same_comdat_group && !node->process)
	{
	  tree decl = node->symbol.decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1495
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to newly constructed empty function body.
   Return the (single) basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  /* The new body is constructed directly in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Advertise the properties the empty body already satisfies so the
     pass manager does not try to lower or gimplify it again.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa | PROP_gimple_any);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1528
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   The generated statements are inserted after *BSI; the adjusted pointer
   is returned in a fresh temporary.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk it is applied after
     (see the second fixed_offset block below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily create the pointer-to-vtable-entry type shared by all
	 thunks.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
	  /* Materialize PTR into a variable so the addition below has a
	     gimple operand.  */
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1642
/* Produce assembler for thunk NODE.  Either the target emits the thunk
   directly as assembly (fast path), or a small GIMPLE body is built and
   queued for regular compilation (slow path).  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->symbol.decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Ensure thunks are emitted in their correct sections.  */
  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

  /* Fast path: the target knows how to output this-adjusting thunks as
     raw assembly.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      /* The thunk is now an ordinary, already-emitted function.  */
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Slow path: build a GIMPLE body that adjusts `this' (and possibly
	 the result), tail-calls the target, and hand it back to the
	 compilation queue via cgraph_add_new_function.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      /* All four block pointers start at the single body block; they are
	 only split apart in the NULL-pointer-protection case below.  */
      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate results go straight into the RESULT_DECL.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') goes through thunk_adjust for
	 this-adjusting thunks; all remaining arguments are forwarded
	 unchanged.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
					build_zero_cst (TREE_TYPE (restmp)),
					NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* NULL case: skip the adjustment and return a zero result.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      cgraph_node_remove_callees (node);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1825
1826
1827
/* Assemble thunks and aliases associated to NODE, recursively emitting
   thunks-of-thunks and aliases-of-aliases.  */

static void
assemble_thunks_and_aliases (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  int i;
  struct ipa_ref *ref;

  /* Thunks appear among the callers of the function they wrap.  E is
     advanced before assembling because assemble_thunk may remove call
     edges (it calls cgraph_node_remove_callees).  */
  for (e = node->callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	struct cgraph_node *thunk = e->caller;

	e = e->next_caller;
	assemble_thunks_and_aliases (thunk);
	assemble_thunk (thunk);
      }
    else
      e = e->next_caller;
  /* Aliases are recorded as IPA_REF_ALIAS references pointing at NODE.  */
  for (i = 0; ipa_ref_list_refering_iterate (&node->symbol.ref_list,
					     i, ref); i++)
    if (ref->use == IPA_REF_ALIAS)
      {
	struct cgraph_node *alias = ipa_ref_refering_node (ref);
	bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);

	/* Force assemble_alias to really output the alias this time instead
	   of buffering it in same alias pairs.  */
	TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
	assemble_alias (alias->symbol.decl,
			DECL_ASSEMBLER_NAME (alias->thunk.alias));
	assemble_thunks_and_aliases (alias);
	TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
      }
}
1864
/* Perform IPA transforms and all further optimizations and compilation
   for FNDECL (the decl of NODE): run the intraprocedural pass list down
   to final assembly output, then release the GIMPLE body.  */

static void
tree_rest_of_compilation (struct cgraph_node *node)
{
  tree fndecl = node->symbol.decl;
  location_t saved_loc;

  timevar_push (TV_REST_OF_COMPILATION);

  /* The whole-program analysis phase must be finished before individual
     functions are compiled.  */
  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = fndecl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (fndecl);
  init_function_start (fndecl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply the per-function parts of the IPA passes first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  set_cfun (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (fndecl) && TREE_TYPE (fndecl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (fndecl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size when it fits in an unsigned int;
	     otherwise just report that the limit is exceeded.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     fndecl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     fndecl, larger_than_size);
	}
    }

  gimple_set_body (fndecl, NULL);
  if (DECL_STRUCT_FUNCTION (fndecl) == 0
      && !cgraph_get_node (fndecl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (fndecl) != 0)
	DECL_INITIAL (fndecl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);
}
1952
/* Expand function specified by NODE: compile its body to assembly,
   emit its thunks and aliases, then release the body and call edges.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->symbol.decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (node);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (node));

  /* It would make a lot more sense to output thunks before the function
     body to get more forward and fewer backward jumps.  That would however
     require solving a problem with comdats.  See PR48668.  Also aliases
     must come after the function itself to make one-pass assemblers, like
     the one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before the function IFF they are
     not in comdat groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
}
1987
1988 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1989
1990 bool
1991 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1992 {
1993 *reason = e->inline_failed;
1994 return !e->inline_failed;
1995 }
1996
1997
1998
1999 /* Expand all functions that must be output.
2000
2001 Attempt to topologically sort the nodes so function is output when
2002 all called functions are already assembled to allow data to be
2003 propagated across the callgraph. Use a stack to get smaller distance
2004 between a function and its callees (later we may choose to use a more
2005 sophisticated algorithm for function reordering; we will likely want
2006 to use subsections to make the output functions appear in top-down
2007 order). */
2008
2009 static void
2010 cgraph_expand_all_functions (void)
2011 {
2012 struct cgraph_node *node;
2013 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
2014 int order_pos, new_order_pos = 0;
2015 int i;
2016
2017 order_pos = ipa_reverse_postorder (order);
2018 gcc_assert (order_pos == cgraph_n_nodes);
2019
2020 /* Garbage collector may remove inline clones we eliminate during
2021 optimization. So we must be sure to not reference them. */
2022 for (i = 0; i < order_pos; i++)
2023 if (order[i]->process)
2024 order[new_order_pos++] = order[i];
2025
2026 for (i = new_order_pos - 1; i >= 0; i--)
2027 {
2028 node = order[i];
2029 if (node->process)
2030 {
2031 gcc_assert (node->reachable);
2032 node->process = 0;
2033 cgraph_expand_function (node);
2034 }
2035 }
2036 cgraph_process_new_functions ();
2037
2038 free (order);
2039
2040 }
2041
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not claimed by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a function (cgraph_node).  */
  ORDER_VAR,		/* Slot holds a variable (varpool_node).  */
  ORDER_ASM		/* Slot holds a toplevel asm (cgraph_asm_node).  */
};
2051
/* One entry of the ordering table built by cgraph_output_in_order;
   KIND discriminates which member of the union U is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
2062
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Every symbol got a unique order number < cgraph_order, so an array
     of that size indexed by order is a complete, collision-free table.  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Slot in every function marked for output; thunks and aliases are
     emitted along with their target by cgraph_expand_function.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  i = pf->symbol.order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Slot in the queued variables.  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->symbol.order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* Slot in toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
	{
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  /* All variables are emitted explicitly below; drain the queue so
     nothing is assembled twice.  */
  varpool_empty_needed_queue ();

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      varpool_finalize_named_section_flags (nodes[i].u.v);

  /* Finally emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
2153
2154 /* Return true when function body of DECL still needs to be kept around
2155 for later re-use. */
2156 bool
2157 cgraph_preserve_function_body_p (struct cgraph_node *node)
2158 {
2159 gcc_assert (cgraph_global_info_ready);
2160 gcc_assert (!node->alias && !node->thunk.thunk_p);
2161
2162 /* Look if there is any clone around. */
2163 if (node->clones)
2164 return true;
2165 return false;
2166 }
2167
/* Drive the inter-procedural pass pipeline: early (small) IPA passes,
   summary generation, LTO stream-out when requested, and finally the
   regular IPA passes.  The in_lto_p / flag_lto conditionals select which
   stages run in a normal compile, an LTO write-out, and an LTO read-back.  */

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Early small-IPA passes only make sense when compiling from source,
     not when reading back LTO sections.  */
  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save work at IPA level and to get LTO
     symbol tables right.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes unless we are only streaming out a slim
     LTO object (they then run in the LTO back end instead).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2228
2229
2230 /* Return string alias is alias of. */
2231
2232 static tree
2233 get_alias_symbol (tree decl)
2234 {
2235 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2236 return get_identifier (TREE_STRING_POINTER
2237 (TREE_VALUE (TREE_VALUE (alias))));
2238 }
2239
2240
2241 /* Weakrefs may be associated to external decls and thus not output
2242 at expansion time. Emit all neccesary aliases. */
2243
2244 static void
2245 output_weakrefs (void)
2246 {
2247 struct cgraph_node *node;
2248 struct varpool_node *vnode;
2249 for (node = cgraph_nodes; node; node = node->next)
2250 if (node->alias && DECL_EXTERNAL (node->symbol.decl)
2251 && !TREE_ASM_WRITTEN (node->symbol.decl)
2252 && lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
2253 assemble_alias (node->symbol.decl,
2254 node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
2255 : get_alias_symbol (node->symbol.decl));
2256 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
2257 if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
2258 && !TREE_ASM_WRITTEN (vnode->symbol.decl)
2259 && lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
2260 assemble_alias (vnode->symbol.decl,
2261 vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
2262 : get_alias_symbol (vnode->symbol.decl));
2263 }
2264
2265
2266
2267 void
2268 init_cgraph (void)
2269 {
2270 if (!cgraph_dump_file)
2271 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
2272 }
2273
2274 /* The edges representing the callers of the NEW_VERSION node were
2275 fixed by cgraph_function_versioning (), now the call_expr in their
2276 respective tree code should be updated to call the NEW_VERSION. */
2277
2278 static void
2279 update_call_expr (struct cgraph_node *new_version)
2280 {
2281 struct cgraph_edge *e;
2282
2283 gcc_assert (new_version);
2284
2285 /* Update the call expr on the edges to call the new version. */
2286 for (e = new_version->callers; e; e = e->next_caller)
2287 {
2288 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->symbol.decl);
2289 gimple_call_set_fndecl (e->call_stmt, new_version->symbol.decl);
2290 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2291 }
2292 }
2293
2294
2295 /* Create a new cgraph node which is the new version of
2296 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2297 edges which should be redirected to point to
2298 NEW_VERSION. ALL the callees edges of OLD_VERSION
2299 are cloned to the new version node. Return the new
2300 version node.
2301
2302 If non-NULL BLOCK_TO_COPY determine what basic blocks
2303 was copied to prevent duplications of calls that are dead
2304 in the clone. */
2305
2306 struct cgraph_node *
2307 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2308 tree new_decl,
2309 VEC(cgraph_edge_p,heap) *redirect_callers,
2310 bitmap bbs_to_copy)
2311 {
2312 struct cgraph_node *new_version;
2313 struct cgraph_edge *e;
2314 unsigned i;
2315
2316 gcc_assert (old_version);
2317
2318 new_version = cgraph_create_node (new_decl);
2319
2320 new_version->analyzed = old_version->analyzed;
2321 new_version->local = old_version->local;
2322 new_version->symbol.externally_visible = false;
2323 new_version->local.local = true;
2324 new_version->global = old_version->global;
2325 new_version->rtl = old_version->rtl;
2326 new_version->reachable = true;
2327 new_version->count = old_version->count;
2328
2329 for (e = old_version->callees; e; e=e->next_callee)
2330 if (!bbs_to_copy
2331 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2332 cgraph_clone_edge (e, new_version, e->call_stmt,
2333 e->lto_stmt_uid, REG_BR_PROB_BASE,
2334 CGRAPH_FREQ_BASE,
2335 true);
2336 for (e = old_version->indirect_calls; e; e=e->next_callee)
2337 if (!bbs_to_copy
2338 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2339 cgraph_clone_edge (e, new_version, e->call_stmt,
2340 e->lto_stmt_uid, REG_BR_PROB_BASE,
2341 CGRAPH_FREQ_BASE,
2342 true);
2343 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2344 {
2345 /* Redirect calls to the old version node to point to its new
2346 version. */
2347 cgraph_redirect_edge_callee (e, new_version);
2348 }
2349
2350 cgraph_call_node_duplication_hooks (old_version, new_version);
2351
2352 return new_version;
2353 }
2354
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.

   If non-NULL ARGS_TO_SKIP determines function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL BBS_TO_COPY determines what basic blocks to copy.
   If non-NULL NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   Return the new version's cgraph node.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bool skip_return,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->symbol.decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Dropping parameters is only valid when the signature may change.  */
  gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR(new_decl) = 0;
  DECL_STATIC_DESTRUCTOR(new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->symbol.decl);
  DECL_VIRTUAL_P (new_version_node->symbol.decl) = 0;
  new_version_node->symbol.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
2437
/* Given a virtual clone, turn it into an actual clone: copy the body of
   the clone's origin into it (applying the recorded tree map and skipped
   arguments) and detach the node from the clone machinery.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Remember the original function we were cloned from, following
     through already-materialized intermediates.  */
  node->former_clone_of = node->clone_of->symbol.decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->symbol.decl, node->symbol.decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, false,
			    NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->symbol.decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->symbol.decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer a clone: unlink NODE from the doubly-linked
     sibling-clone list hanging off its origin.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin is itself dead and this was its last clone, its body
     and references are no longer needed.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->symbol.ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2475
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  When the
   callee is a clone with skipped arguments, the call statement is
   replaced by a copy with the matching argument list.  Return the
   (possibly new) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
  gimple_stmt_iterator gsi;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls or when the statement already
     calls the edge's callee.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->symbol.decl)
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  /* The current fndecl must not itself be an args-skipping clone; we
     only redirect TOWARDS such clones, never away from them.  */
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      int lp_nr;

      /* Build a copy of the call with the skipped arguments dropped and
	 pointed at the clone.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);

      /* Keep the virtual-operand SSA web consistent after the copy.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
         fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* Same argument list: just retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->symbol.decl);
      update_stmt (new_stmt);
    }

  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, " updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2557
2558 /* Once all functions from compilation unit are in memory, produce all clones
2559 and update all calls. We might also do this on demand if we don't want to
2560 bring all functions to memory prior compilation, but current WHOPR
2561 implementation does that and it is is bit easier to keep everything right in
2562 this order. */
2563 static void
2564 cgraph_materialize_all_clones (void)
2565 {
2566 struct cgraph_node *node;
2567 bool stabilized = false;
2568
2569 if (cgraph_dump_file)
2570 fprintf (cgraph_dump_file, "Materializing clones\n");
2571 #ifdef ENABLE_CHECKING
2572 verify_cgraph ();
2573 #endif
2574
2575 /* We can also do topological order, but number of iterations should be
2576 bounded by number of IPA passes since single IPA pass is probably not
2577 going to create clones of clones it created itself. */
2578 while (!stabilized)
2579 {
2580 stabilized = true;
2581 for (node = cgraph_nodes; node; node = node->next)
2582 {
2583 if (node->clone_of && node->symbol.decl != node->clone_of->symbol.decl
2584 && !gimple_has_body_p (node->symbol.decl))
2585 {
2586 if (gimple_has_body_p (node->clone_of->symbol.decl))
2587 {
2588 if (cgraph_dump_file)
2589 {
2590 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2591 cgraph_node_name (node->clone_of),
2592 cgraph_node_name (node));
2593 if (node->clone.tree_map)
2594 {
2595 unsigned int i;
2596 fprintf (cgraph_dump_file, " replace map: ");
2597 for (i = 0; i < VEC_length (ipa_replace_map_p,
2598 node->clone.tree_map);
2599 i++)
2600 {
2601 struct ipa_replace_map *replace_info;
2602 replace_info = VEC_index (ipa_replace_map_p,
2603 node->clone.tree_map,
2604 i);
2605 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2606 fprintf (cgraph_dump_file, " -> ");
2607 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2608 fprintf (cgraph_dump_file, "%s%s;",
2609 replace_info->replace_p ? "(replace)":"",
2610 replace_info->ref_p ? "(ref)":"");
2611 }
2612 fprintf (cgraph_dump_file, "\n");
2613 }
2614 if (node->clone.args_to_skip)
2615 {
2616 fprintf (cgraph_dump_file, " args_to_skip: ");
2617 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2618 }
2619 if (node->clone.args_to_skip)
2620 {
2621 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2622 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2623 }
2624 }
2625 cgraph_materialize_clone (node);
2626 stabilized = false;
2627 }
2628 }
2629 }
2630 }
2631 for (node = cgraph_nodes; node; node = node->next)
2632 if (!node->analyzed && node->callees)
2633 cgraph_node_remove_callees (node);
2634 if (cgraph_dump_file)
2635 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2636 #ifdef ENABLE_CHECKING
2637 verify_cgraph ();
2638 #endif
2639 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2640 }
2641
2642
/* Perform simple optimizations based on callgraph.  This is the top-level
   driver that runs IPA optimization, materializes clones, and expands all
   functions and variables to assembly.  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Turn virtual clones into real functions before late IPA passes.  */
  cgraph_materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_late_ipa_passes);
  cgraph_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  bitmap_obstack_release (NULL);
  cgraph_mark_functions_to_output ();
  output_weakrefs ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder emits symbols in declaration order.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }

  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->symbol.decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2761
2762
/* Analyze the whole compilation unit once it is parsed completely.  This
   is the front-end's entry point into the middle end: it lowers all
   finalized functions and then drives the optimization pipeline.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  The first
     analysis round may have discovered new aliases.  */
  finish_aliases_1 ();
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
2814
2815
2816 #include "gt-cgraphunit.h"