cgraphunit.c (decide_is_function_needed): Do not keep always_inline functions.
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48 In the unit-at-a-time the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 - expand_function callback
81
82 This function is used to expand function and pass it into RTL back-end.
83 Front-end should not make any assumptions about when this function can be
84 called. In particular cgraph_assemble_pending_functions,
85 varpool_assemble_pending_variables, cgraph_finalize_function,
86 varpool_finalize_function, cgraph_optimize can cause arbitrarily
87 previously finalized functions to be expanded.
88
89 We implement two compilation modes.
90
91 - unit-at-a-time: In this mode analyzing of all functions is deferred
92 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too
98 and variables found to be needed output to the assembly file. Via
99 mark_referenced call in assemble_variable functions referenced by
100 static variables are noticed too.
101
102 The intra-procedural information is produced and its existence
103 indicated by global_info_ready. Once this flag is set it is impossible
104 to change function from !reachable to reachable and thus
105 assemble_variable no longer call mark_referenced.
106
107 Finally the call-graph is topologically sorted and all reachable functions
108     that have not been completely inlined or are not external are output.
109
110 ??? It is possible that reference to function or variable is optimized
111 out. We can not deal with this nicely because topological order is not
112 suitable for it. For tree-ssa we may consider another pass doing
113 optimization and re-discovering reachable functions.
114
115 ??? Reorganize code so variables are output very last and only if they
116 really has been referenced by produced code, so we catch more cases
117 where reference has been optimized out.
118
119 - non-unit-at-a-time
120
121     All functions and variables are output as early as possible to conserve
122 memory consumption. This may or may not result in less memory used but
123 it is still needed for some legacy code that rely on particular ordering
124 of things output from the compiler.
125
126 Varpool data structures are not used and variables are output directly.
127
128 Functions are output early using call of
129 cgraph_assemble_pending_function from cgraph_finalize_function. The
130 decision on whether function is needed is made more conservative so
131 uninlininable static functions are needed too. During the call-graph
132 construction the edge destinations are not marked as reachable and it
133     is completely relied upon assemble_variable to mark them.  */
134
135
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static void cgraph_output_pending_asms (void);
168
169 static FILE *cgraph_dump_file;
170
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  tree origin;

  /* "main" is the program entry point; it must always be emitted and is
     treated as externally visible.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* In non-unit-at-a-time mode honor the "used" attribute immediately,
     since there is no later whole-unit pass to pick it up.  */
  if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  always_inline functions are deliberately not
     kept: their bodies are required to disappear into their callers.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !node->origin))
       && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if its marked for inlining.  Otherwise we want to emit it now.  */

  /* "extern inline" functions are never output locally.  */
  if (DECL_EXTERNAL (decl))
    return false;
  /* Nested functions of extern inline function shall not be emit unless
     we inlined the origin.  */
  for (origin = decl_function_context (decl); origin;
       origin = decl_function_context (origin))
    if (DECL_EXTERNAL (origin))
      return false;
  /* We want to emit COMDAT functions only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  if (!DECL_INLINE (decl)
      || (!node->local.disregard_inline_limits
	  /* When declared inline, defer even the uninlinable functions.
	     This allows them to be eliminated when unused.  */
	  && !DECL_DECLARED_INLINE_P (decl)
	  && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
    return true;

  return false;
}
264
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Return true when at least one function was finalized (this only
   happens in the CONSTRUCTION state).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  node->needed = node->reachable = false;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  tree_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  /* Recompute the local inlining information the whole-unit
	     analysis would otherwise have produced for this node.  */
	  node->local.inlinable = tree_inlinable_function_p (fndecl);
	  node->local.self_insns = estimate_num_insns (fndecl,
						       &eni_inlining_weights);
	  node->local.disregard_inline_limits
	    = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
	  /* Inlining characteristics are maintained by the
	     cgraph_mark_inline.  */
	  node->global.insns = node->local.self_insns;
	  if (flag_really_no_inline && !node->local.disregard_inline_limits)
	    node->local.inlinable = 0;
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->output = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  return output;
}
343
344 /* When not doing unit-at-a-time, output all functions enqueued.
345 Return true when such a functions were found. */
346
347 static bool
348 cgraph_assemble_pending_functions (void)
349 {
350 bool output = false;
351
352 if (flag_unit_at_a_time)
353 return false;
354
355 cgraph_output_pending_asms ();
356
357 while (cgraph_nodes_queue)
358 {
359 struct cgraph_node *n = cgraph_nodes_queue;
360
361 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
362 n->next_needed = NULL;
363 if (!n->global.inlined_to
364 && !n->alias
365 && !DECL_EXTERNAL (n->decl))
366 {
367 cgraph_expand_function (n);
368 output = true;
369 }
370 output |= cgraph_process_new_functions ();
371 }
372
373 return output;
374 }
375
376
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->output is set, then this is a unit-at-a-time compilation
     and we have already begun whole-unit analysis.  This is *not*
     testing for whether we've already emitted the function.  That
     case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should
     never present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->output);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  if (!flag_unit_at_a_time)
    {
      struct cgraph_node *n, *next;

      /* Drop clones that were inlined into the now-discarded old body.  */
      for (n = cgraph_nodes; n; n = next)
	{
	  next = n->next;
	  if (n->global.inlined_to == node)
	    cgraph_remove_node (n);
	}
    }

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already proceeded it and ignored as not needed.  */
  if (node->reachable && !flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      /* Clear reachability only when the node is not still sitting on
	 the assembly queue.  */
      for (n = cgraph_nodes_queue; n; n = n->next_needed)
	if (n == node)
	  break;
      if (!n)
	node->reachable = 0;
    }
}
433
434 static void
435 cgraph_lower_function (struct cgraph_node *node)
436 {
437 if (node->lowered)
438 return;
439 tree_lowering_passes (node->decl);
440 node->lowered = true;
441 }
442
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* Redefinition of an already-finalized body (GCC extension): throw
     away the data computed for the old body first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->decl = decl;
  node->local.finalized = true;
  /* A body that already has a CFG was lowered by the front end.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    cgraph_analyze_function (node);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If not unit at a time, go ahead and emit everything we've found
     to be reachable at this time.  */
  if (!nested)
    {
      if (!cgraph_assemble_pending_functions ())
	ggc_collect ();
    }

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);
}
495
/* Verify cgraph nodes of given cgraph node.  Checks profile counts and
   edge frequencies for sanity, the inline-clone bookkeeping
   (inlined_to pointers), membership in the cgraph hash, and the
   one-to-one correspondence between call statements in the body and
   callgraph edges.  Aborts with an internal error when any
   inconsistency is found.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct cgraph_node *main_clone;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  block_stmt_iterator bsi;
  bool error_found = false;

  /* Skip verification once errors were reported; the graph may be
     legitimately inconsistent then.  */
  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* The edge AUX fields are used below to match edges against call
     statements; they must start out clear.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       cgraph_node_name (e->caller), cgraph_node_name (e->callee));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
	{
	  error ("caller edge count is negative");
	  error_found = true;
	}
      if (e->frequency < 0)
	{
	  error ("caller edge frequency is negative");
	  error_found = true;
	}
      if (e->frequency > CGRAPH_FREQ_MAX)
	{
	  error ("caller edge frequency is too large");
	  error_found = true;
	}
      if (!e->inline_failed)
	{
	  /* An inlined edge: this node must record the function it was
	     ultimately inlined into, and can have only one caller.  */
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  /* NODE must be reachable from the master clone list of its decl.  */
  for (main_clone = cgraph_node (node->decl); main_clone;
       main_clone = main_clone->next_clone)
    if (main_clone == node)
      break;
  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->analyzed
      && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
	      {
		tree stmt = bsi_stmt (bsi);
		tree call = get_call_expr_in (stmt);
		tree decl;
		if (call && (decl = get_callee_fndecl (call)))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_generic_stmt (stmt);
			    error_found = true;
			  }
			if (e->callee->decl != cgraph_node (decl)->decl
			    && e->inline_failed)
			  {
			    error ("edge points to wrong declaration:");
			    debug_tree (e->callee->decl);
			    fprintf (stderr," Instead of:");
			    debug_tree (decl);
			  }
			/* Mark this edge as matched to a statement.  */
			e->aux = (void *)1;
		      }
		    else
		      {
			error ("missing callgraph edge for call stmt:");
			debug_generic_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Every callee edge must have been matched by some call statement
	 above; clear the markers for the next verification run.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     cgraph_node_name (e->caller),
		     cgraph_node_name (e->callee));
	      debug_generic_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}
655
656 /* Verify whole cgraph structure. */
657 void
658 verify_cgraph (void)
659 {
660 struct cgraph_node *node;
661
662 if (sorrycount || errorcount)
663 return;
664
665 for (node = cgraph_nodes; node; node = node->next)
666 verify_cgraph_node (node);
667 }
668
669 /* Output all asm statements we have stored up to be output. */
670
671 static void
672 cgraph_output_pending_asms (void)
673 {
674 struct cgraph_asm_node *can;
675
676 if (errorcount || sorrycount)
677 return;
678
679 for (can = cgraph_asm_nodes; can; can = can->next)
680 assemble_asm (can->asm_str);
681 cgraph_asm_nodes = NULL;
682 }
683
/* Analyze the function scheduled to be output.  Lowers the body,
   computes the node's local information (stack size estimate,
   inlinability, instruction-count estimate) and, when not in
   unit-at-a-time mode, runs the early local passes.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  cgraph_lower_function (node);

  /* Stack-frame estimates feed the inliner's stack-growth limits.  */
  node->local.estimated_self_stack_size = estimated_stack_frame_size ();
  node->global.estimated_stack_size = node->local.estimated_self_stack_size;
  node->global.stack_frame_offset = 0;
  node->local.inlinable = tree_inlinable_function_p (decl);
  if (!flag_unit_at_a_time)
    node->local.self_insns = estimate_num_insns (decl, &eni_inlining_weights);
  if (node->local.inlinable)
    node->local.disregard_inline_limits
      = lang_hooks.tree_inlining.disregard_inline_limits (decl);
  /* -fno-inline disables all inlining except of always_inline bodies.  */
  if (flag_really_no_inline && !node->local.disregard_inline_limits)
    node->local.inlinable = 0;
  /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
  node->global.insns = node->local.self_insns;
  if (!flag_unit_at_a_time)
    {
      bitmap_obstack_initialize (NULL);
      tree_register_cfg_hooks ();
      execute_pass_list (pass_early_local_passes.sub);
      free_dominance_info (CDI_POST_DOMINATORS);
      free_dominance_info (CDI_DOMINATORS);
      bitmap_obstack_release (NULL);
    }

  node->analyzed = true;
  pop_cfun ();
  current_function_decl = NULL;
}
721
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Only nodes added since the previous invocation (those before FIRST
     and FIRST_VAR in the chains) need processing.  */
  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
	{
	  mark_decl_referenced (decl);
	  if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning (OPT_Wattributes,
		     "%J%<externally_visible%> attribute have effect only on public objects",
		     node->decl);
	  else
	    {
	      if (node->local.finalized)
		cgraph_mark_needed_node (node);
	      node->local.externally_visible = true;
	    }
	}
    }
  /* Same processing for variables in the varpool.  */
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
	{
	  mark_decl_referenced (decl);
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning (OPT_Wattributes,
		     "%J%<externally_visible%> attribute have effect only on public objects",
		     vnode->decl);
	  else
	    {
	      if (vnode->finalized)
		varpool_mark_needed_node (vnode);
	      vnode->externally_visible = true;
	    }
	}
    }
}
800
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the worklist.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_SAVED_TREE (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (DECL_SAVED_TREE (decl));

      cgraph_analyze_function (node);

      /* Everything this function calls becomes reachable as well.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove nodes that were never found reachable to conserve memory.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !DECL_SAVED_TREE (decl))
	cgraph_reset_node (node);

      if (!node->reachable && DECL_SAVED_TREE (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
914
915 /* Analyze the whole compilation unit once it is parsed completely. */
916
917 void
918 cgraph_finalize_compilation_unit (void)
919 {
920 if (errorcount || sorrycount)
921 return;
922
923 finish_aliases_1 ();
924
925 if (!flag_unit_at_a_time)
926 {
927 cgraph_output_pending_asms ();
928 cgraph_assemble_pending_functions ();
929 varpool_output_debug_info ();
930 return;
931 }
932
933 if (!quiet_flag)
934 {
935 fprintf (stderr, "\nAnalyzing compilation unit\n");
936 fflush (stderr);
937 }
938
939 timevar_push (TV_CGRAPH);
940 cgraph_analyze_functions ();
941 timevar_pop (TV_CGRAPH);
942 }
/* Figure out what functions we want to assemble, setting the OUTPUT
   flag on each node that must get an out-of-line copy.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->output);

      /* Look for at least one caller that did not inline this call;
	 such a caller requires an out-of-line body.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (DECL_SAVED_TREE (decl)
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->output = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
989
/* Expand function specified by NODE into RTL and assemble it,
   suppressing debug output for DECL_IGNORED_P bodies and releasing the
   tree body afterwards when it is no longer needed for inlining.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  enum debug_info_type save_write_symbols = NO_DEBUG;
  const struct gcc_debug_hooks *save_debug_hooks = NULL;
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  if (flag_unit_at_a_time)
    announce_function (decl);

  gcc_assert (node->lowered);

  /* Temporarily disable debug info generation for compiler-internal
     functions; restored below after expansion.  */
  if (DECL_IGNORED_P (decl))
    {
      save_write_symbols = write_symbols;
      write_symbols = NO_DEBUG;
      save_debug_hooks = debug_hooks;
      debug_hooks = &do_nothing_debug_hooks;
    }

  /* Generate RTL for the body of DECL.  */
  lang_hooks.callgraph.expand_function (decl);

  /* Make sure that BE didn't give up on compiling.  */
  /* ??? Can happen with nested function of extern inline.  */
  gcc_assert (TREE_ASM_WRITTEN (node->decl));

  if (DECL_IGNORED_P (decl))
    {
      write_symbols = save_write_symbols;
      debug_hooks = save_debug_hooks;
    }

  current_function_decl = NULL;
  if (!cgraph_preserve_function_body_p (node->decl))
    {
      cgraph_release_function_body (node);
      /* Eliminate all call edges.  This is important so the call_expr no longer
	 points to the dead function body.  */
      cgraph_node_remove_callees (node);
    }

  cgraph_function_flags_ready = true;
}
1039
/* Return true when the call represented by edge E is to be inlined
   (i.e. the callee's body is substituted into the caller).  If it is
   not, store the message explaining why into *REASON.  Note: the old
   comment claiming the caller is inlined into the callee had the
   direction reversed.  */

bool
cgraph_inline_p (struct cgraph_edge *e, const char **reason)
{
  *reason = e->inline_failed;
  return !e->inline_failed;
}
1048
1049
1050
1051 /* Expand all functions that must be output.
1052
1053 Attempt to topologically sort the nodes so function is output when
1054 all called functions are already assembled to allow data to be
1055 propagated across the callgraph. Use a stack to get smaller distance
1056 between a function and its callees (later we may choose to use a more
1057 sophisticated algorithm for function reordering; we will likely want
1058 to use subsections to make the output functions appear in top-down
1059 order). */
1060
1061 static void
1062 cgraph_expand_all_functions (void)
1063 {
1064 struct cgraph_node *node;
1065 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1066 int order_pos = 0, new_order_pos = 0;
1067 int i;
1068
1069 order_pos = cgraph_postorder (order);
1070 gcc_assert (order_pos == cgraph_n_nodes);
1071
1072 /* Garbage collector may remove inline clones we eliminate during
1073 optimization. So we must be sure to not reference them. */
1074 for (i = 0; i < order_pos; i++)
1075 if (order[i]->output)
1076 order[new_order_pos++] = order[i];
1077
1078 for (i = new_order_pos - 1; i >= 0; i--)
1079 {
1080 node = order[i];
1081 if (node->output)
1082 {
1083 gcc_assert (node->reachable);
1084 node->output = 0;
1085 cgraph_expand_function (node);
1086 }
1087 }
1088 cgraph_process_new_functions ();
1089
1090 free (order);
1091
1092 }
1093
/* This is used to sort the node types by the cgraph order number.
   One entry exists per "order" slot assigned when the entity was seen.  */

struct cgraph_order_sort
{
  /* What kind of entity occupies this slot (ORDER_UNDEFINED if none).  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1106
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Every toplevel entity was assigned a unique "order" number below
     cgraph_order as it was seen; use it to index a sparse array.
     NOTE(review): this is alloca'd, so a huge translation unit puts
     the whole table on the stack.  */
  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  varpool_analyze_pending_decls ();

  /* Slot every function marked for output at its original position.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->output)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Likewise for queued variables...  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* ... and for toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->output = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  /* Empty slot: entity at this order number needed no output.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* All asm nodes have been emitted; drop the list.  */
  cgraph_asm_nodes = NULL;
}
1185
1186 /* Return true when function body of DECL still needs to be kept around
1187 for later re-use. */
1188 bool
1189 cgraph_preserve_function_body_p (tree decl)
1190 {
1191 struct cgraph_node *node;
1192 if (!cgraph_global_info_ready)
1193 return (flag_really_no_inline
1194 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1195 : DECL_INLINE (decl));
1196 /* Look if there is any clone around. */
1197 for (node = cgraph_node (decl); node; node = node->next_clone)
1198 if (node->global.inlined_to)
1199 return true;
1200 return false;
1201 }
1202
/* Run the list of interprocedural (IPA) passes over the whole callgraph.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the callgraph as a whole, not on any single
     function; clear the per-function state first.  */
  cfun = NULL;
  current_function_decl = NULL;
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1213
/* Perform simple optimizations based on callgraph.  Main driver for the
   whole-unit optimization and code emission phase: runs IPA passes,
   removes unreachable nodes, then expands and assembles everything.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  /* Non-unit-at-a-time: functions were emitted as they were parsed;
     just flush the remaining queues and finish.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_assemble_pending_functions ();
      cgraph_process_new_functions ();
      cgraph_state = CGRAPH_STATE_FINISHED;
      cgraph_output_pending_asms ();
      varpool_assemble_pending_decls ();
      varpool_output_debug_info ();
      return;
    }

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();
  cgraph_analyze_functions ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder requires emitting in original source order;
     otherwise expand in (roughly) topological order.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
      varpool_output_debug_info ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (flag_unit_at_a_time
      && !(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| DECL_SAVED_TREE (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with no released memory found");
    }
#endif
}
/* Generate and emit a static constructor or destructor.  WHICH must be
   one of 'I' or 'D'.  BODY should be a STATEMENT_LIST containing
   GENERIC statements.  PRIORITY is passed to the target's constructor/
   destructor emission hook when the target supports ctors/dtors.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* Build a unique per-file function name from WHICH and a counter.  */
  sprintf (which_buf, "%c_%d", which, counter++);
  name = get_file_function_name (which_buf);

  /* The cdtor takes no arguments and returns void.  */
  decl = build_decl (FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_IGNORED_P (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* When the target cannot emit ctors/dtors directly the function must
     stay visible for the external collection mechanism.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the function as a constructor or destructor per WHICH.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the new function to the callgraph and force it to be output.  */
  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));

  if (targetm.have_ctors_dtors)
    {
      void (*fn) (rtx, int);

      if (which == 'I')
	fn = targetm.asm_out.constructor;
      else
	fn = targetm.asm_out.destructor;
      fn (XEXP (DECL_RTL (decl), 0), priority);
    }
}
1393
/* Initialize the callgraph module: open the cgraph dump file if
   dumping was requested.  */

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1399
1400 /* The edges representing the callers of the NEW_VERSION node were
1401 fixed by cgraph_function_versioning (), now the call_expr in their
1402 respective tree code should be updated to call the NEW_VERSION. */
1403
1404 static void
1405 update_call_expr (struct cgraph_node *new_version)
1406 {
1407 struct cgraph_edge *e;
1408
1409 gcc_assert (new_version);
1410 for (e = new_version->callers; e; e = e->next_caller)
1411 /* Update the call expr on the edges
1412 to call the new version. */
1413 TREE_OPERAND (CALL_EXPR_FN (get_call_expr_in (e->call_stmt)), 0) = new_version->decl;
1414 }
1415
1416
1417 /* Create a new cgraph node which is the new version of
1418 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1419 edges which should be redirected to point to
1420 NEW_VERSION. ALL the callees edges of OLD_VERSION
1421 are cloned to the new version node. Return the new
1422 version node. */
1423
1424 static struct cgraph_node *
1425 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1426 tree new_decl,
1427 VEC(cgraph_edge_p,heap) *redirect_callers)
1428 {
1429 struct cgraph_node *new_version;
1430 struct cgraph_edge *e, *new_e;
1431 struct cgraph_edge *next_callee;
1432 unsigned i;
1433
1434 gcc_assert (old_version);
1435
1436 new_version = cgraph_node (new_decl);
1437
1438 new_version->analyzed = true;
1439 new_version->local = old_version->local;
1440 new_version->global = old_version->global;
1441 new_version->rtl = new_version->rtl;
1442 new_version->reachable = true;
1443 new_version->count = old_version->count;
1444
1445 /* Clone the old node callees. Recursive calls are
1446 also cloned. */
1447 for (e = old_version->callees;e; e=e->next_callee)
1448 {
1449 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1450 e->loop_nest, true);
1451 new_e->count = e->count;
1452 }
1453 /* Fix recursive calls.
1454 If OLD_VERSION has a recursive call after the
1455 previous edge cloning, the new version will have an edge
1456 pointing to the old version, which is wrong;
1457 Redirect it to point to the new version. */
1458 for (e = new_version->callees ; e; e = next_callee)
1459 {
1460 next_callee = e->next_callee;
1461 if (e->callee == old_version)
1462 cgraph_redirect_edge_callee (e, new_version);
1463
1464 if (!next_callee)
1465 break;
1466 }
1467 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1468 {
1469 /* Redirect calls to the old version node to point to its new
1470 version. */
1471 cgraph_redirect_edge_callee (e, new_version);
1472 }
1473
1474 return new_version;
1475 }
1476
1477 /* Perform function versioning.
1478 Function versioning includes copying of the tree and
1479 a callgraph update (creating a new cgraph node and updating
1480 its callees and callers).
1481
1482 REDIRECT_CALLERS varray includes the edges to be redirected
1483 to the new version.
1484
1485 TREE_MAP is a mapping of tree nodes we want to replace with
1486 new ones (according to results of prior analysis).
1487 OLD_VERSION_NODE is the node that is versioned.
1488 It returns the new version's cgraph node. */
1489
1490 struct cgraph_node *
1491 cgraph_function_versioning (struct cgraph_node *old_version_node,
1492 VEC(cgraph_edge_p,heap) *redirect_callers,
1493 varray_type tree_map)
1494 {
1495 tree old_decl = old_version_node->decl;
1496 struct cgraph_node *new_version_node = NULL;
1497 tree new_decl;
1498
1499 if (!tree_versionable_function_p (old_decl))
1500 return NULL;
1501
1502 /* Make a new FUNCTION_DECL tree node for the
1503 new version. */
1504 new_decl = copy_node (old_decl);
1505
1506 /* Create the new version's call-graph node.
1507 and update the edges of the new node. */
1508 new_version_node =
1509 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1510 redirect_callers);
1511
1512 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1513 tree_function_versioning (old_decl, new_decl, tree_map, false);
1514 /* Update the call_expr on the edges to call the new version node. */
1515 update_call_expr (new_version_node);
1516
1517 /* Update the new version's properties.
1518 Make The new version visible only within this translation unit.
1519 ??? We cannot use COMDAT linkage because there is no
1520 ABI support for this. */
1521 DECL_EXTERNAL (new_version_node->decl) = 0;
1522 DECL_ONE_ONLY (new_version_node->decl) = 0;
1523 TREE_PUBLIC (new_version_node->decl) = 0;
1524 DECL_COMDAT (new_version_node->decl) = 0;
1525 new_version_node->local.externally_visible = 0;
1526 new_version_node->local.local = 1;
1527 new_version_node->lowered = true;
1528 return new_version_node;
1529 }
1530
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  Returns the clone node that carries
   the saved body.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  /* NODE must be the master node, not one of its clones.  */
  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* In non-unit-at-a-time we construct full fledged clone we never output to
     assembly file.  This clone is pointed out by inline_decl of original function
     and inlining infrastructure knows how to deal with this.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;

      first_clone = cgraph_clone_node (node, node->count, 0, CGRAPH_FREQ_BASE,
				       false);
      first_clone->needed = 0;
      first_clone->reachable = 1;
      /* Recursively clone all bodies.  */
      for (e = first_clone->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  cgraph_clone_inlined_nodes (e, true, false);
    }
  else
    /* In unit-at-a-time mode reuse the first existing clone.  */
    first_clone = node->next_clone;

  /* Give the clone its own DECL so the body can be copied into it and
     detach it from NODE's clone list, making it a master of its own.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  if (!flag_unit_at_a_time)
    node->inline_decl = first_clone->decl;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  /* The saved copy is local to this translation unit only.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* Any remaining clones share the newly copied DECL.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}