1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface between
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of a
33 function and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 functions.)
37
38 - varpool_finalize_variable
39
40 This function has the same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once the (source level) compilation unit is finalized
46 and will no longer change.
47
48 In unit-at-a-time mode the call-graph construction and local function
49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
54
55 - cgraph_optimize
56
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function; all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On tree-ssa, genericizing should take place here and we would avoid
78 the need for these hooks (replacing them by a genericizing hook).
79
80 - expand_function callback
81
82 This function is used to expand a function and pass it into the RTL back end.
83 The front end should not make any assumptions about when this function can be
84 called. In particular cgraph_assemble_pending_functions,
85 varpool_assemble_pending_variables, cgraph_finalize_function,
86 varpool_finalize_function, and cgraph_optimize can cause arbitrary
87 previously finalized functions to be expanded.
88
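    As an illustration only (a hypothetical front end, not code taken from
    this file), the expected sequence of calls into the interface above is
    roughly:

      for each function FNDECL whose body has been fully parsed:
        cgraph_finalize_function (FNDECL, false);
      for each finalized static variable VARDECL:
        varpool_finalize_variable (VARDECL);
      once the whole translation unit has been parsed:
        cgraph_finalize_compilation_unit ();
      and finally, when it is time to emit code:
        cgraph_optimize ();
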
89 We implement two compilation modes.
90
91 - unit-at-a-time: In this mode analysis of all functions is deferred
92 until cgraph_finalize_compilation_unit and expansion until cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too,
98 and variables found to be needed are output to the assembly file. Via
99 the mark_referenced call in assemble_variable, functions referenced by
100 static variables are noticed too.
101
102 The intra-procedural information is produced and its existence is
103 indicated by global_info_ready. Once this flag is set it is impossible
104 to change a function from !reachable to reachable and thus
105 assemble_variable no longer calls mark_referenced.
106
107 Finally the call-graph is topologically sorted and all reachable functions
108 that have not been completely inlined and are not external are output.
109
110 ??? It is possible that a reference to a function or variable is optimized
111 out. We cannot deal with this nicely because the topological order is not
112 suitable for it. For tree-ssa we may consider another pass doing
113 optimization and re-discovering reachable functions.
114
115 ??? Reorganize the code so variables are output last and only if they
116 really have been referenced by the produced code, so we catch more cases
117 where the reference has been optimized out.
118
119 - non-unit-at-a-time
120
121 All functions and variables are output as early as possible to conserve
122 memory consumption. This may or may not result in less memory being used, but
123 it is still needed for some legacy code that relies on a particular ordering
124 of things output from the compiler.
125
126 Varpool data structures are not used and variables are output directly.
127
128 Functions are output early using a call to
129 cgraph_assemble_pending_functions from cgraph_finalize_function. The
130 decision on whether a function is needed is made more conservatively, so
131 uninlinable static functions are needed too. During the call-graph
132 construction the edge destinations are not marked as reachable and we
133 rely entirely upon assemble_variable to mark them. */
134
135
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static void cgraph_output_pending_asms (void);
168
169 static FILE *cgraph_dump_file;
170
171 /* Determine if function DECL is needed. That is, visible to something
172 either outside this translation unit, something magic in the system
173 configury, or (if not doing unit-at-a-time) to something we haven't
174 seen yet. */
175
176 static bool
177 decide_is_function_needed (struct cgraph_node *node, tree decl)
178 {
179 tree origin;
180 if (MAIN_NAME_P (DECL_NAME (decl))
181 && TREE_PUBLIC (decl))
182 {
183 node->local.externally_visible = true;
184 return true;
185 }
186
187 /* If the user told us it is used, then it must be so. */
188 if (node->local.externally_visible)
189 return true;
190
191 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
192 return true;
193
194 /* ??? If the assembler name is set by hand, it is possible to assemble
195 the name later after finalizing the function and the fact is noticed
196 in assemble_name then. This is arguably a bug. */
197 if (DECL_ASSEMBLER_NAME_SET_P (decl)
198 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
199 return true;
200
201 /* If we decided it was needed before, but at the time we didn't have
202 the body of the function available, then it's still needed. We have
203 to go back and re-check its dependencies now. */
204 if (node->needed)
205 return true;
206
207 /* Externally visible functions must be output. The exception is
208 COMDAT functions that must be output only when they are needed.
209
210 When not optimizing, also output the static functions (see
211 PR24561), but don't do so for always_inline functions, functions
212 declared inline, and nested functions. These were optimized out
213 in the original implementation and it is unclear whether we want
214 to change the behavior here. */
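 /* In other words, the test below returns true when DECL is public, or when
    we are not optimizing and DECL is a plain static function (not
    always_inline, not declared inline, and not nested); in either case only
    if we are not compiling the whole program at once, and never for COMDAT
    or external declarations.  */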
215 if (((TREE_PUBLIC (decl)
216 || (!optimize && !node->local.disregard_inline_limits
217 && !DECL_DECLARED_INLINE_P (decl)
218 && !node->origin))
219 && !flag_whole_program)
220 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
221 return true;
222
223 /* Constructors and destructors are reachable from the runtime by
224 some mechanism. */
225 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
226 return true;
227
228 if (flag_unit_at_a_time)
229 return false;
230
231 /* If not doing unit at a time, then we'll only defer this function
232 if it's marked for inlining. Otherwise we want to emit it now. */
233
234 /* "extern inline" functions are never output locally. */
235 if (DECL_EXTERNAL (decl))
236 return false;
237 /* Nested functions of an extern inline function shall not be emitted
238 unless we inlined the origin. */
239 for (origin = decl_function_context (decl); origin;
240 origin = decl_function_context (origin))
241 if (DECL_EXTERNAL (origin))
242 return false;
243 /* We want to emit COMDAT functions only when absolutely necessary. */
244 if (DECL_COMDAT (decl))
245 return false;
246 if (!DECL_INLINE (decl)
247 || (!node->local.disregard_inline_limits
248 /* When declared inline, defer even the uninlinable functions.
249 This allows them to be eliminated when unused. */
250 && !DECL_DECLARED_INLINE_P (decl)
251 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
252 return true;
253
254 return false;
255 }
256
257 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
258 functions into the callgraph so that they look like ordinary reachable
259 functions inserted into the callgraph at construction time. */
260
261 bool
262 cgraph_process_new_functions (void)
263 {
264 bool output = false;
265 tree fndecl;
266 struct cgraph_node *node;
267
268 /* Note that this queue may grow as it is being processed, as the new
269 functions may generate new ones. */
270 while (cgraph_new_nodes)
271 {
272 node = cgraph_new_nodes;
273 fndecl = node->decl;
274 cgraph_new_nodes = cgraph_new_nodes->next_needed;
275 switch (cgraph_state)
276 {
277 case CGRAPH_STATE_CONSTRUCTION:
278 /* At construction time we just need to finalize function and move
279 it into reachable functions list. */
280
281 node->next_needed = NULL;
282 node->needed = node->reachable = false;
283 cgraph_finalize_function (fndecl, false);
284 cgraph_mark_reachable_node (node);
285 output = true;
286 break;
287
288 case CGRAPH_STATE_IPA:
289 case CGRAPH_STATE_IPA_SSA:
290 /* When IPA optimization has already started, do all essential
291 transformations that have already been performed on the whole
292 cgraph but not on this function. */
293
294 tree_register_cfg_hooks ();
295 if (!node->analyzed)
296 cgraph_analyze_function (node);
297 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
298 current_function_decl = fndecl;
299 node->local.inlinable = tree_inlinable_function_p (fndecl);
300 node->local.self_insns = estimate_num_insns (fndecl);
301 node->local.disregard_inline_limits
302 = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
303 /* Inlining characteristics are maintained by the
304 cgraph_mark_inline. */
305 node->global.insns = node->local.self_insns;
306 if (flag_really_no_inline && !node->local.disregard_inline_limits)
307 node->local.inlinable = 0;
308 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
309 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
310 /* When not optimizing, be sure we run early local passes anyway
311 to expand OMP. */
312 || !optimize)
313 execute_pass_list (pass_early_local_passes.sub);
314 free_dominance_info (CDI_POST_DOMINATORS);
315 free_dominance_info (CDI_DOMINATORS);
316 pop_cfun ();
317 current_function_decl = NULL;
318 break;
319
320 case CGRAPH_STATE_EXPANSION:
321 /* Functions created during expansion shall be compiled
322 directly. */
323 node->output = 0;
324 cgraph_expand_function (node);
325 break;
326
327 default:
328 gcc_unreachable ();
329 break;
330 }
331 }
332 return output;
333 }
334
335 /* When not doing unit-at-a-time, output all functions enqueued.
336 Return true when such functions were found. */
337
338 static bool
339 cgraph_assemble_pending_functions (void)
340 {
341 bool output = false;
342
343 if (flag_unit_at_a_time)
344 return false;
345
346 cgraph_output_pending_asms ();
347
348 while (cgraph_nodes_queue)
349 {
350 struct cgraph_node *n = cgraph_nodes_queue;
351
352 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
353 n->next_needed = NULL;
354 if (!n->global.inlined_to
355 && !n->alias
356 && !DECL_EXTERNAL (n->decl))
357 {
358 cgraph_expand_function (n);
359 output = true;
360 }
361 output |= cgraph_process_new_functions ();
362 }
363
364 return output;
365 }
366
367
368 /* As a GCC extension we allow redefinition of a function. The
369 semantics when the two bodies differ are not well defined.
370 We replace the old body with the new body, so in unit-at-a-time mode
371 we always use the new body, while in normal mode we may end up with
372 the old body inlined into some functions and the new body expanded and
373 inlined in others.
374
375 ??? It may make more sense to use one body for inlining and the other
376 body for expanding the function, but this is difficult to do. */
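 /* A minimal illustration of the extension mentioned above (example only,
    not code from this file); with the GNU "extern inline" extension the
    second definition below replaces the first, inline-only one:

      extern inline int f (void) { return 1; }
      int f (void) { return 2; }  */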
377
378 static void
379 cgraph_reset_node (struct cgraph_node *node)
380 {
381 /* If node->output is set, then this is a unit-at-a-time compilation
382 and we have already begun whole-unit analysis. This is *not*
383 testing for whether we've already emitted the function. That
384 case can be sort-of legitimately seen with real function
385 redefinition errors. I would argue that the front end should
386 never present us with such a case, but don't enforce that for now. */
387 gcc_assert (!node->output);
388
389 /* Reset our data structures so we can analyze the function again. */
390 memset (&node->local, 0, sizeof (node->local));
391 memset (&node->global, 0, sizeof (node->global));
392 memset (&node->rtl, 0, sizeof (node->rtl));
393 node->analyzed = false;
394 node->local.redefined_extern_inline = true;
395 node->local.finalized = false;
396
397 if (!flag_unit_at_a_time)
398 {
399 struct cgraph_node *n, *next;
400
401 for (n = cgraph_nodes; n; n = next)
402 {
403 next = n->next;
404 if (n->global.inlined_to == node)
405 cgraph_remove_node (n);
406 }
407 }
408
409 cgraph_node_remove_callees (node);
410
411 /* We may need to re-queue the node for assembling in case
412 we already processed it and ignored it as not needed. */
413 if (node->reachable && !flag_unit_at_a_time)
414 {
415 struct cgraph_node *n;
416
417 for (n = cgraph_nodes_queue; n; n = n->next_needed)
418 if (n == node)
419 break;
420 if (!n)
421 node->reachable = 0;
422 }
423 }
424
425 static void
426 cgraph_lower_function (struct cgraph_node *node)
427 {
428 if (node->lowered)
429 return;
430 tree_lowering_passes (node->decl);
431 node->lowered = true;
432 }
433
434 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
435 logic in effect. If NESTED is true, then our caller cannot stand to have
436 the garbage collector run at the moment. We would need to either create
437 a new GC context, or just not compile right now. */
438
439 void
440 cgraph_finalize_function (tree decl, bool nested)
441 {
442 struct cgraph_node *node = cgraph_node (decl);
443
444 if (node->local.finalized)
445 cgraph_reset_node (node);
446
447 node->pid = cgraph_max_pid ++;
448 notice_global_symbol (decl);
449 node->decl = decl;
450 node->local.finalized = true;
451 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
452 if (node->nested)
453 lower_nested_functions (decl);
454 gcc_assert (!node->nested);
455
456 /* If not unit at a time, then we need to create the call graph
457 now, so that called functions can be queued and emitted now. */
458 if (!flag_unit_at_a_time)
459 cgraph_analyze_function (node);
460
461 if (decide_is_function_needed (node, decl))
462 cgraph_mark_needed_node (node);
463
464 /* Since we reclaim unreachable nodes at the end of every language
465 level unit, we need to be conservative about possible entry points
466 there. */
467 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
468 cgraph_mark_reachable_node (node);
469
470 /* If not unit at a time, go ahead and emit everything we've found
471 to be reachable at this time. */
472 if (!nested)
473 {
474 if (!cgraph_assemble_pending_functions ())
475 ggc_collect ();
476 }
477
478 /* If we've not yet emitted decl, tell the debug info about it. */
479 if (!TREE_ASM_WRITTEN (decl))
480 (*debug_hooks->deferred_inline_function) (decl);
481
482 /* Possibly warn about unused parameters. */
483 if (warn_unused_parameter)
484 do_warn_unused_parameter (decl);
485 }
486
487 /* Verify the consistency of cgraph node NODE. */
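 /* The checks below cover: stray aux markers on edges, negative execution
    counts, consistency of the inlined_to pointers, membership in the cgraph
    hash table, and a one-to-one correspondence between the call statements
    in the function body and the callgraph edges.  */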
488 void
489 verify_cgraph_node (struct cgraph_node *node)
490 {
491 struct cgraph_edge *e;
492 struct cgraph_node *main_clone;
493 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
494 basic_block this_block;
495 block_stmt_iterator bsi;
496 bool error_found = false;
497
498 if (errorcount || sorrycount)
499 return;
500
501 timevar_push (TV_CGRAPH_VERIFY);
502 for (e = node->callees; e; e = e->next_callee)
503 if (e->aux)
504 {
505 error ("aux field set for edge %s->%s",
506 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
507 error_found = true;
508 }
509 if (node->count < 0)
510 {
511 error ("execution count is negative");
512 error_found = true;
513 }
514 for (e = node->callers; e; e = e->next_caller)
515 {
516 if (e->count < 0)
517 {
518 error ("caller edge count is negative");
519 error_found = true;
520 }
521 if (!e->inline_failed)
522 {
523 if (node->global.inlined_to
524 != (e->caller->global.inlined_to
525 ? e->caller->global.inlined_to : e->caller))
526 {
527 error ("inlined_to pointer is wrong");
528 error_found = true;
529 }
530 if (node->callers->next_caller)
531 {
532 error ("multiple inline callers");
533 error_found = true;
534 }
535 }
536 else
537 if (node->global.inlined_to)
538 {
539 error ("inlined_to pointer set for noninline callers");
540 error_found = true;
541 }
542 }
543 if (!node->callers && node->global.inlined_to)
544 {
545 error ("inlined_to pointer is set but no predecessors found");
546 error_found = true;
547 }
548 if (node->global.inlined_to == node)
549 {
550 error ("inlined_to pointer refers to itself");
551 error_found = true;
552 }
553
554 for (main_clone = cgraph_node (node->decl); main_clone;
555 main_clone = main_clone->next_clone)
556 if (main_clone == node)
557 break;
558 if (!cgraph_node (node->decl))
559 {
560 error ("node not found in cgraph_hash");
561 error_found = true;
562 }
563
564 if (node->analyzed
565 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
566 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
567 {
568 if (this_cfun->cfg)
569 {
570 /* The nodes we're interested in are never shared, so walk
571 the tree ignoring duplicates. */
572 struct pointer_set_t *visited_nodes = pointer_set_create ();
573 /* Reach the trees by walking over the CFG, and note the
574 enclosing basic-blocks in the call edges. */
575 FOR_EACH_BB_FN (this_block, this_cfun)
576 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
577 {
578 tree stmt = bsi_stmt (bsi);
579 tree call = get_call_expr_in (stmt);
580 tree decl;
581 if (call && (decl = get_callee_fndecl (call)))
582 {
583 struct cgraph_edge *e = cgraph_edge (node, stmt);
584 if (e)
585 {
586 if (e->aux)
587 {
588 error ("shared call_stmt:");
589 debug_generic_stmt (stmt);
590 error_found = true;
591 }
592 if (e->callee->decl != cgraph_node (decl)->decl
593 && e->inline_failed)
594 {
595 error ("edge points to wrong declaration:");
596 debug_tree (e->callee->decl);
597 fprintf (stderr," Instead of:");
598 debug_tree (decl);
599 }
600 e->aux = (void *)1;
601 }
602 else
603 {
604 error ("missing callgraph edge for call stmt:");
605 debug_generic_stmt (stmt);
606 error_found = true;
607 }
608 }
609 }
610 pointer_set_destroy (visited_nodes);
611 }
612 else
613 /* No CFG available?! */
614 gcc_unreachable ();
615
616 for (e = node->callees; e; e = e->next_callee)
617 {
618 if (!e->aux)
619 {
620 error ("edge %s->%s has no corresponding call_stmt",
621 cgraph_node_name (e->caller),
622 cgraph_node_name (e->callee));
623 debug_generic_stmt (e->call_stmt);
624 error_found = true;
625 }
626 e->aux = 0;
627 }
628 }
629 if (error_found)
630 {
631 dump_cgraph_node (stderr, node);
632 internal_error ("verify_cgraph_node failed");
633 }
634 timevar_pop (TV_CGRAPH_VERIFY);
635 }
636
637 /* Verify whole cgraph structure. */
638 void
639 verify_cgraph (void)
640 {
641 struct cgraph_node *node;
642
643 if (sorrycount || errorcount)
644 return;
645
646 for (node = cgraph_nodes; node; node = node->next)
647 verify_cgraph_node (node);
648 }
649
650 /* Output all asm statements we have stored up to be output. */
651
652 static void
653 cgraph_output_pending_asms (void)
654 {
655 struct cgraph_asm_node *can;
656
657 if (errorcount || sorrycount)
658 return;
659
660 for (can = cgraph_asm_nodes; can; can = can->next)
661 assemble_asm (can->asm_str);
662 cgraph_asm_nodes = NULL;
663 }
664
665 /* Analyze the function scheduled to be output. */
666 void
667 cgraph_analyze_function (struct cgraph_node *node)
668 {
669 tree decl = node->decl;
670
671 current_function_decl = decl;
672 push_cfun (DECL_STRUCT_FUNCTION (decl));
673 cgraph_lower_function (node);
674
675 node->local.estimated_self_stack_size = estimated_stack_frame_size ();
676 node->global.estimated_stack_size = node->local.estimated_self_stack_size;
677 node->global.stack_frame_offset = 0;
678 node->local.inlinable = tree_inlinable_function_p (decl);
679 if (!flag_unit_at_a_time)
680 node->local.self_insns = estimate_num_insns (decl);
681 if (node->local.inlinable)
682 node->local.disregard_inline_limits
683 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
684 if (flag_really_no_inline && !node->local.disregard_inline_limits)
685 node->local.inlinable = 0;
686 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
687 node->global.insns = node->local.self_insns;
688 if (!flag_unit_at_a_time)
689 {
690 bitmap_obstack_initialize (NULL);
691 tree_register_cfg_hooks ();
692 execute_pass_list (pass_early_local_passes.sub);
693 free_dominance_info (CDI_POST_DOMINATORS);
694 free_dominance_info (CDI_DOMINATORS);
695 bitmap_obstack_release (NULL);
696 }
697
698 node->analyzed = true;
699 pop_cfun ();
700 current_function_decl = NULL;
701 }
702
703 /* Look for externally_visible and used attributes and mark cgraph nodes
704 accordingly.
705
706 We cannot mark the nodes at the point the attributes are processed (in
707 handle_*_attribute) because the copy of the declarations available at that
708 point may not be canonical. For example, in:
709
710 void f();
711 void f() __attribute__((used));
712
713 the declaration we see in handle_used_attribute will be the second
714 declaration -- but the front end will subsequently merge that declaration
715 with the original declaration and discard the second declaration.
716
717 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
718
719 void f() {}
720 void f() __attribute__((externally_visible));
721
722 is valid.
723
724 So, we walk the nodes at the end of the translation unit, applying the
725 attributes at that point. */
726
727 static void
728 process_function_and_variable_attributes (struct cgraph_node *first,
729 struct varpool_node *first_var)
730 {
731 struct cgraph_node *node;
732 struct varpool_node *vnode;
733
734 for (node = cgraph_nodes; node != first; node = node->next)
735 {
736 tree decl = node->decl;
737 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
738 {
739 mark_decl_referenced (decl);
740 if (node->local.finalized)
741 cgraph_mark_needed_node (node);
742 }
743 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
744 {
745 if (! TREE_PUBLIC (node->decl))
746 warning (OPT_Wattributes,
747 "%J%<externally_visible%> attribute have effect only on public objects",
748 node->decl);
749 else
750 {
751 if (node->local.finalized)
752 cgraph_mark_needed_node (node);
753 node->local.externally_visible = true;
754 }
755 }
756 }
757 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
758 {
759 tree decl = vnode->decl;
760 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
761 {
762 mark_decl_referenced (decl);
763 if (vnode->finalized)
764 varpool_mark_needed_node (vnode);
765 }
766 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
767 {
768 if (! TREE_PUBLIC (vnode->decl))
769 warning (OPT_Wattributes,
770 "%J%<externally_visible%> attribute have effect only on public objects",
771 vnode->decl);
772 else
773 {
774 if (vnode->finalized)
775 varpool_mark_needed_node (vnode);
776 vnode->externally_visible = true;
777 }
778 }
779 }
780 }
781
782 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and, transitively,
783 each reachable function) and build the cgraph.
784 The function can be called multiple times after inserting new nodes
785 into the beginning of the queue. Only the new part of the queue is re-scanned then. */
786
787 static void
788 cgraph_analyze_functions (void)
789 {
790 /* Keep track of already processed nodes when called multiple times for
791 intermodule optimization. */
792 static struct cgraph_node *first_analyzed;
793 struct cgraph_node *first_processed = first_analyzed;
794 static struct varpool_node *first_analyzed_var;
795 struct cgraph_node *node, *next;
796
797 process_function_and_variable_attributes (first_processed,
798 first_analyzed_var);
799 first_processed = cgraph_nodes;
800 first_analyzed_var = varpool_nodes;
801 varpool_analyze_pending_decls ();
802 if (cgraph_dump_file)
803 {
804 fprintf (cgraph_dump_file, "Initial entry points:");
805 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
806 if (node->needed && DECL_SAVED_TREE (node->decl))
807 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
808 fprintf (cgraph_dump_file, "\n");
809 }
810 cgraph_process_new_functions ();
811
812 /* Propagate reachability flag and lower representation of all reachable
813 functions. In the future, lowering will introduce new functions and
814 new entry points on the way (by template instantiation and virtual
815 method table generation for instance). */
816 while (cgraph_nodes_queue)
817 {
818 struct cgraph_edge *edge;
819 tree decl = cgraph_nodes_queue->decl;
820
821 node = cgraph_nodes_queue;
822 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
823 node->next_needed = NULL;
824
825 /* ??? It is possible to create an extern inline function and later use the
826 weak alias attribute to kill its body. See
827 gcc.c-torture/compile/20011119-1.c */
828 if (!DECL_SAVED_TREE (decl))
829 {
830 cgraph_reset_node (node);
831 continue;
832 }
833
834 gcc_assert (!node->analyzed && node->reachable);
835 gcc_assert (DECL_SAVED_TREE (decl));
836
837 cgraph_analyze_function (node);
838
839 for (edge = node->callees; edge; edge = edge->next_callee)
840 if (!edge->callee->reachable)
841 cgraph_mark_reachable_node (edge->callee);
842
843 /* We finalize local static variables while constructing callgraph
844 edges. Process their attributes too. */
845 process_function_and_variable_attributes (first_processed,
846 first_analyzed_var);
847 first_processed = cgraph_nodes;
848 first_analyzed_var = varpool_nodes;
849 varpool_analyze_pending_decls ();
850 cgraph_process_new_functions ();
851 }
852
853 /* Collect entry points to the unit. */
854 if (cgraph_dump_file)
855 {
856 fprintf (cgraph_dump_file, "Unit entry points:");
857 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
858 if (node->needed && DECL_SAVED_TREE (node->decl))
859 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
860 fprintf (cgraph_dump_file, "\n\nInitial ");
861 dump_cgraph (cgraph_dump_file);
862 }
863
864 if (cgraph_dump_file)
865 fprintf (cgraph_dump_file, "\nReclaiming functions:");
866
867 for (node = cgraph_nodes; node != first_analyzed; node = next)
868 {
869 tree decl = node->decl;
870 next = node->next;
871
872 if (node->local.finalized && !DECL_SAVED_TREE (decl))
873 cgraph_reset_node (node);
874
875 if (!node->reachable && DECL_SAVED_TREE (decl))
876 {
877 if (cgraph_dump_file)
878 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
879 cgraph_remove_node (node);
880 continue;
881 }
882 else
883 node->next_needed = NULL;
884 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
885 gcc_assert (node->analyzed == node->local.finalized);
886 }
887 if (cgraph_dump_file)
888 {
889 fprintf (cgraph_dump_file, "\n\nReclaimed ");
890 dump_cgraph (cgraph_dump_file);
891 }
892 first_analyzed = cgraph_nodes;
893 ggc_collect ();
894 }
895
896 /* Analyze the whole compilation unit once it is parsed completely. */
897
898 void
899 cgraph_finalize_compilation_unit (void)
900 {
901 if (errorcount || sorrycount)
902 return;
903
904 finish_aliases_1 ();
905
906 if (!flag_unit_at_a_time)
907 {
908 cgraph_output_pending_asms ();
909 cgraph_assemble_pending_functions ();
910 varpool_output_debug_info ();
911 return;
912 }
913
914 if (!quiet_flag)
915 {
916 fprintf (stderr, "\nAnalyzing compilation unit\n");
917 fflush (stderr);
918 }
919
920 timevar_push (TV_CGRAPH);
921 cgraph_analyze_functions ();
922 timevar_pop (TV_CGRAPH);
923 }
924 /* Figure out what functions we want to assemble. */
925
926 static void
927 cgraph_mark_functions_to_output (void)
928 {
929 struct cgraph_node *node;
930
931 for (node = cgraph_nodes; node; node = node->next)
932 {
933 tree decl = node->decl;
934 struct cgraph_edge *e;
935
936 gcc_assert (!node->output);
937
938 for (e = node->callers; e; e = e->next_caller)
939 if (e->inline_failed)
940 break;
941
942 /* We need to output all local functions that are used and not
943 always inlined, as well as those that are reachable from
944 outside the current compilation unit. */
945 if (DECL_SAVED_TREE (decl)
946 && !node->global.inlined_to
947 && (node->needed
948 || (e && node->reachable))
949 && !TREE_ASM_WRITTEN (decl)
950 && !DECL_EXTERNAL (decl))
951 node->output = 1;
952 else
953 {
954 /* We should've reclaimed all functions that are not needed. */
955 #ifdef ENABLE_CHECKING
956 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
957 && !DECL_EXTERNAL (decl))
958 {
959 dump_cgraph_node (stderr, node);
960 internal_error ("failed to reclaim unneeded function");
961 }
962 #endif
963 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
964 || DECL_EXTERNAL (decl));
965
966 }
967
968 }
969 }
970
971 /* Expand function specified by NODE. */
972
973 static void
974 cgraph_expand_function (struct cgraph_node *node)
975 {
976 tree decl = node->decl;
977
978 /* We ought to not compile any inline clones. */
979 gcc_assert (!node->global.inlined_to);
980
981 if (flag_unit_at_a_time)
982 announce_function (decl);
983
984 gcc_assert (node->lowered);
985
986 /* Generate RTL for the body of DECL. */
987 lang_hooks.callgraph.expand_function (decl);
988
989 /* Make sure that the back end didn't give up on compiling. */
990 /* ??? Can happen with a nested function of an extern inline function. */
991 gcc_assert (TREE_ASM_WRITTEN (node->decl));
992
993 current_function_decl = NULL;
994 if (!cgraph_preserve_function_body_p (node->decl))
995 {
996 cgraph_release_function_body (node);
997 /* Eliminate all call edges. This is important so the call_expr no longer
998 points to the dead function body. */
999 cgraph_node_remove_callees (node);
1000 }
1001
1002 cgraph_function_flags_ready = true;
1003 }
1004
1005 /* Return true when edge E should be inlined; otherwise set *REASON to why not. */
1006
1007 bool
1008 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1009 {
1010 *reason = e->inline_failed;
1011 return !e->inline_failed;
1012 }
1013
1014
1015
1016 /* Expand all functions that must be output.
1017
1018 Attempt to topologically sort the nodes so that a function is output when
1019 all the functions it calls have already been assembled, allowing data to be
1020 propagated across the callgraph. Use a stack to get a smaller distance
1021 between a function and its callees (later we may choose to use a more
1022 sophisticated algorithm for function reordering; we will likely want
1023 to use subsections to make the output functions appear in top-down
1024 order). */
1025
1026 static void
1027 cgraph_expand_all_functions (void)
1028 {
1029 struct cgraph_node *node;
1030 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1031 int order_pos = 0, new_order_pos = 0;
1032 int i;
1033
1034 order_pos = cgraph_postorder (order);
1035 gcc_assert (order_pos == cgraph_n_nodes);
1036
1037 /* The garbage collector may remove inline clones we eliminate during
1038 optimization, so we must be sure not to reference them. */
1039 for (i = 0; i < order_pos; i++)
1040 if (order[i]->output)
1041 order[new_order_pos++] = order[i];
1042
1043 for (i = new_order_pos - 1; i >= 0; i--)
1044 {
1045 node = order[i];
1046 if (node->output)
1047 {
1048 gcc_assert (node->reachable);
1049 node->output = 0;
1050 cgraph_expand_function (node);
1051 }
1052 }
1053 cgraph_process_new_functions ();
1054
1055 free (order);
1056
1057 }
1058
1059 /* This is used to sort the node types by the cgraph order number. */
1060
1061 struct cgraph_order_sort
1062 {
1063 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1064 union
1065 {
1066 struct cgraph_node *f;
1067 struct varpool_node *v;
1068 struct cgraph_asm_node *a;
1069 } u;
1070 };
1071
1072 /* Output all functions, variables, and asm statements in the order
1073 according to their order fields, which is the order in which they
1074 appeared in the file. This implements -fno-toplevel-reorder. In
1075 this mode we may output functions and variables which don't really
1076 need to be output. */
1077
1078 static void
1079 cgraph_output_in_order (void)
1080 {
1081 int max;
1082 size_t size;
1083 struct cgraph_order_sort *nodes;
1084 int i;
1085 struct cgraph_node *pf;
1086 struct varpool_node *pv;
1087 struct cgraph_asm_node *pa;
1088
1089 max = cgraph_order;
1090 size = max * sizeof (struct cgraph_order_sort);
1091 nodes = (struct cgraph_order_sort *) alloca (size);
1092 memset (nodes, 0, size);
1093
1094 varpool_analyze_pending_decls ();
1095
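 /* Every cgraph, varpool, and asm node remembers the order number it was
    assigned when it was seen; use that number to index NODES so that the
    walk at the end emits everything in its original declaration order.  */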
1096 for (pf = cgraph_nodes; pf; pf = pf->next)
1097 {
1098 if (pf->output)
1099 {
1100 i = pf->order;
1101 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1102 nodes[i].kind = ORDER_FUNCTION;
1103 nodes[i].u.f = pf;
1104 }
1105 }
1106
1107 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1108 {
1109 i = pv->order;
1110 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1111 nodes[i].kind = ORDER_VAR;
1112 nodes[i].u.v = pv;
1113 }
1114
1115 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1116 {
1117 i = pa->order;
1118 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1119 nodes[i].kind = ORDER_ASM;
1120 nodes[i].u.a = pa;
1121 }
1122
1123 for (i = 0; i < max; ++i)
1124 {
1125 switch (nodes[i].kind)
1126 {
1127 case ORDER_FUNCTION:
1128 nodes[i].u.f->output = 0;
1129 cgraph_expand_function (nodes[i].u.f);
1130 break;
1131
1132 case ORDER_VAR:
1133 varpool_assemble_decl (nodes[i].u.v);
1134 break;
1135
1136 case ORDER_ASM:
1137 assemble_asm (nodes[i].u.a->asm_str);
1138 break;
1139
1140 case ORDER_UNDEFINED:
1141 break;
1142
1143 default:
1144 gcc_unreachable ();
1145 }
1146 }
1147
1148 cgraph_asm_nodes = NULL;
1149 }
1150
1151 /* Return true when function body of DECL still needs to be kept around
1152 for later re-use. */
1153 bool
1154 cgraph_preserve_function_body_p (tree decl)
1155 {
1156 struct cgraph_node *node;
1157 if (!cgraph_global_info_ready)
1158 return (flag_really_no_inline
1159 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1160 : DECL_INLINE (decl));
1161 /* Look if there is any clone around. */
1162 for (node = cgraph_node (decl); node; node = node->next_clone)
1163 if (node->global.inlined_to)
1164 return true;
1165 return false;
1166 }
1167
1168 static void
1169 ipa_passes (void)
1170 {
1171 cfun = NULL;
1172 current_function_decl = NULL;
1173 tree_register_cfg_hooks ();
1174 bitmap_obstack_initialize (NULL);
1175 execute_ipa_pass_list (all_ipa_passes);
1176 bitmap_obstack_release (NULL);
1177 }
1178
1179 /* Perform simple optimizations based on callgraph. */
1180
1181 void
1182 cgraph_optimize (void)
1183 {
1184 if (errorcount || sorrycount)
1185 return;
1186
1187 #ifdef ENABLE_CHECKING
1188 verify_cgraph ();
1189 #endif
1190 if (!flag_unit_at_a_time)
1191 {
1192 cgraph_assemble_pending_functions ();
1193 cgraph_process_new_functions ();
1194 cgraph_state = CGRAPH_STATE_FINISHED;
1195 cgraph_output_pending_asms ();
1196 varpool_assemble_pending_decls ();
1197 varpool_output_debug_info ();
1198 return;
1199 }
1200
1201 /* The front end may output common variables after the unit has been finalized.
1202 It is safe to deal with them here as they are always zero initialized. */
1203 varpool_analyze_pending_decls ();
1204 cgraph_analyze_functions ();
1205
1206 timevar_push (TV_CGRAPHOPT);
1207 if (pre_ipa_mem_report)
1208 {
1209 fprintf (stderr, "Memory consumption before IPA\n");
1210 dump_memory_report (false);
1211 }
1212 if (!quiet_flag)
1213 fprintf (stderr, "Performing interprocedural optimizations\n");
1214 cgraph_state = CGRAPH_STATE_IPA;
1215
1216 /* Don't run the IPA passes if there were any error or sorry messages. */
1217 if (errorcount == 0 && sorrycount == 0)
1218 ipa_passes ();
1219
1220 /* This pass removes bodies of extern inline functions we never inlined.
1221 Do this later so other IPA passes see what is really going on. */
1222 cgraph_remove_unreachable_nodes (false, dump_file);
1223 cgraph_global_info_ready = true;
1224 if (cgraph_dump_file)
1225 {
1226 fprintf (cgraph_dump_file, "Optimized ");
1227 dump_cgraph (cgraph_dump_file);
1228 dump_varpool (cgraph_dump_file);
1229 }
1230 if (post_ipa_mem_report)
1231 {
1232 fprintf (stderr, "Memory consumption after IPA\n");
1233 dump_memory_report (false);
1234 }
1235 timevar_pop (TV_CGRAPHOPT);
1236
1237 /* Output everything. */
1238 if (!quiet_flag)
1239 fprintf (stderr, "Assembling functions:\n");
1240 #ifdef ENABLE_CHECKING
1241 verify_cgraph ();
1242 #endif
1243
1244 cgraph_mark_functions_to_output ();
1245
1246 cgraph_state = CGRAPH_STATE_EXPANSION;
1247 if (!flag_toplevel_reorder)
1248 cgraph_output_in_order ();
1249 else
1250 {
1251 cgraph_output_pending_asms ();
1252
1253 cgraph_expand_all_functions ();
1254 varpool_remove_unreferenced_decls ();
1255
1256 varpool_assemble_pending_decls ();
1257 varpool_output_debug_info ();
1258 }
1259 cgraph_process_new_functions ();
1260 cgraph_state = CGRAPH_STATE_FINISHED;
1261
1262 if (cgraph_dump_file)
1263 {
1264 fprintf (cgraph_dump_file, "\nFinal ");
1265 dump_cgraph (cgraph_dump_file);
1266 }
1267 #ifdef ENABLE_CHECKING
1268 verify_cgraph ();
1269 /* Double check that all inline clones are gone and that all
1270 function bodies have been released from memory. */
1271 if (flag_unit_at_a_time
1272 && !(sorrycount || errorcount))
1273 {
1274 struct cgraph_node *node;
1275 bool error_found = false;
1276
1277 for (node = cgraph_nodes; node; node = node->next)
1278 if (node->analyzed
1279 && (node->global.inlined_to
1280 || DECL_SAVED_TREE (node->decl)))
1281 {
1282 error_found = true;
1283 dump_cgraph_node (stderr, node);
1284 }
1285 if (error_found)
1286 internal_error ("nodes with no released memory found");
1287 }
1288 #endif
1289 }
1290 /* Generate and emit a static constructor or destructor. WHICH must be
1291 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1292 GENERIC statements. */
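 /* Illustration only (a hypothetical caller, not code from this file): a
    front end that has collected initialization statements into a
    STATEMENT_LIST BODY could emit them as a static constructor running at
    the default priority with something like:

      cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);  */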
1293
1294 void
1295 cgraph_build_static_cdtor (char which, tree body, int priority)
1296 {
1297 static int counter = 0;
1298 char which_buf[16];
1299 tree decl, name, resdecl;
1300
1301 sprintf (which_buf, "%c_%d", which, counter++);
1302 name = get_file_function_name (which_buf);
1303
1304 decl = build_decl (FUNCTION_DECL, name,
1305 build_function_type (void_type_node, void_list_node));
1306 current_function_decl = decl;
1307
1308 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1309 DECL_ARTIFICIAL (resdecl) = 1;
1310 DECL_IGNORED_P (resdecl) = 1;
1311 DECL_RESULT (decl) = resdecl;
1312
1313 allocate_struct_function (decl);
1314
1315 TREE_STATIC (decl) = 1;
1316 TREE_USED (decl) = 1;
1317 DECL_ARTIFICIAL (decl) = 1;
1318 DECL_IGNORED_P (decl) = 1;
1319 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1320 DECL_SAVED_TREE (decl) = body;
1321 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1322 DECL_UNINLINABLE (decl) = 1;
1323
1324 DECL_INITIAL (decl) = make_node (BLOCK);
1325 TREE_USED (DECL_INITIAL (decl)) = 1;
1326
1327 DECL_SOURCE_LOCATION (decl) = input_location;
1328 cfun->function_end_locus = input_location;
1329
1330 switch (which)
1331 {
1332 case 'I':
1333 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1334 break;
1335 case 'D':
1336 DECL_STATIC_DESTRUCTOR (decl) = 1;
1337 break;
1338 default:
1339 gcc_unreachable ();
1340 }
1341
1342 gimplify_function_tree (decl);
1343
1344 cgraph_add_new_function (decl, false);
1345 cgraph_mark_needed_node (cgraph_node (decl));
1346
1347 if (targetm.have_ctors_dtors)
1348 {
1349 void (*fn) (rtx, int);
1350
1351 if (which == 'I')
1352 fn = targetm.asm_out.constructor;
1353 else
1354 fn = targetm.asm_out.destructor;
1355 fn (XEXP (DECL_RTL (decl), 0), priority);
1356 }
1357 }
1358
1359 void
1360 init_cgraph (void)
1361 {
1362 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1363 }
1364
1365 /* The edges representing the callers of the NEW_VERSION node were
1366 fixed by cgraph_function_versioning (); now the call_expr in their
1367 respective tree code should be updated to call NEW_VERSION. */
1368
1369 static void
1370 update_call_expr (struct cgraph_node *new_version)
1371 {
1372 struct cgraph_edge *e;
1373
1374 gcc_assert (new_version);
1375 for (e = new_version->callers; e; e = e->next_caller)
1376 /* Update the call expr on the edges
1377 to call the new version. */
1378 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1379 }
1380
1381
1382 /* Create a new cgraph node which is the new version of
1383 the OLD_VERSION node. REDIRECT_CALLERS holds the caller
1384 edges which should be redirected to point to
1385 NEW_VERSION. All the callee edges of OLD_VERSION
1386 are cloned to the new version node. Return the new
1387 version node. */
1388
1389 static struct cgraph_node *
1390 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1391 tree new_decl,
1392 VEC(cgraph_edge_p,heap) *redirect_callers)
1393 {
1394 struct cgraph_node *new_version;
1395 struct cgraph_edge *e, *new_e;
1396 struct cgraph_edge *next_callee;
1397 unsigned i;
1398
1399 gcc_assert (old_version);
1400
1401 new_version = cgraph_node (new_decl);
1402
1403 new_version->analyzed = true;
1404 new_version->local = old_version->local;
1405 new_version->global = old_version->global;
1406 new_version->rtl = old_version->rtl;
1407 new_version->reachable = true;
1408 new_version->count = old_version->count;
1409
1410 /* Clone the old node callees. Recursive calls are
1411 also cloned. */
1412 for (e = old_version->callees;e; e=e->next_callee)
1413 {
1414 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1415 new_e->count = e->count;
1416 }
1417 /* Fix recursive calls.
1418 If OLD_VERSION has a recursive call after the
1419 previous edge cloning, the new version will have an edge
1420 pointing to the old version, which is wrong;
1421 Redirect it to point to the new version. */
1422 for (e = new_version->callees ; e; e = next_callee)
1423 {
1424 next_callee = e->next_callee;
1425 if (e->callee == old_version)
1426 cgraph_redirect_edge_callee (e, new_version);
1427
1428 if (!next_callee)
1429 break;
1430 }
1431 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1432 {
1433 /* Redirect calls to the old version node to point to its new
1434 version. */
1435 cgraph_redirect_edge_callee (e, new_version);
1436 }
1437
1438 return new_version;
1439 }
1440
1441 /* Perform function versioning.
1442 Function versioning includes copying of the tree and
1443 a callgraph update (creating a new cgraph node and updating
1444 its callees and callers).
1445
1446 REDIRECT_CALLERS varray includes the edges to be redirected
1447 to the new version.
1448
1449 TREE_MAP is a mapping of tree nodes we want to replace with
1450 new ones (according to results of prior analysis).
1451 OLD_VERSION_NODE is the node that is versioned.
1452 It returns the new version's cgraph node. */
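 /* Illustration only (a hypothetical IPA pass, not code from this file):
    to clone NODE and redirect all of its current callers to the clone,
    without any tree replacements, one could write something like:

      VEC(cgraph_edge_p,heap) *redirect = NULL;
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
        VEC_safe_push (cgraph_edge_p, heap, redirect, e);
      new_node = cgraph_function_versioning (node, redirect, NULL);  */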
1453
1454 struct cgraph_node *
1455 cgraph_function_versioning (struct cgraph_node *old_version_node,
1456 VEC(cgraph_edge_p,heap) *redirect_callers,
1457 varray_type tree_map)
1458 {
1459 tree old_decl = old_version_node->decl;
1460 struct cgraph_node *new_version_node = NULL;
1461 tree new_decl;
1462
1463 if (!tree_versionable_function_p (old_decl))
1464 return NULL;
1465
1466 /* Make a new FUNCTION_DECL tree node for the
1467 new version. */
1468 new_decl = copy_node (old_decl);
1469
1470 /* Create the new version's call-graph node
1471 and update the edges of the new node. */
1472 new_version_node =
1473 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1474 redirect_callers);
1475
1476 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1477 tree_function_versioning (old_decl, new_decl, tree_map, false);
1478 /* Update the call_expr on the edges to call the new version node. */
1479 update_call_expr (new_version_node);
1480
1481 /* Update the new version's properties.
1482 Make the new version visible only within this translation unit.
1483 ??? We cannot use COMDAT linkage because there is no
1484 ABI support for this. */
1485 DECL_EXTERNAL (new_version_node->decl) = 0;
1486 DECL_ONE_ONLY (new_version_node->decl) = 0;
1487 TREE_PUBLIC (new_version_node->decl) = 0;
1488 DECL_COMDAT (new_version_node->decl) = 0;
1489 new_version_node->local.externally_visible = 0;
1490 new_version_node->local.local = 1;
1491 new_version_node->lowered = true;
1492 return new_version_node;
1493 }
1494
1495 /* Produce a separate function body for inline clones so the offline copy can be
1496 modified without affecting them. */
1497 struct cgraph_node *
1498 save_inline_function_body (struct cgraph_node *node)
1499 {
1500 struct cgraph_node *first_clone;
1501
1502 gcc_assert (node == cgraph_node (node->decl));
1503
1504 cgraph_lower_function (node);
1505
1506 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1507 output to the assembly file. This clone is pointed to by inline_decl of the
1508 original function and the inlining infrastructure knows how to deal with it. */
1509 if (!flag_unit_at_a_time)
1510 {
1511 struct cgraph_edge *e;
1512
1513 first_clone = cgraph_clone_node (node, node->count, 0, false);
1514 first_clone->needed = 0;
1515 first_clone->reachable = 1;
1516 /* Recursively clone all bodies. */
1517 for (e = first_clone->callees; e; e = e->next_callee)
1518 if (!e->inline_failed)
1519 cgraph_clone_inlined_nodes (e, true, false);
1520 }
1521 else
1522 first_clone = node->next_clone;
1523
1524 first_clone->decl = copy_node (node->decl);
1525 node->next_clone = NULL;
1526 if (!flag_unit_at_a_time)
1527 node->inline_decl = first_clone->decl;
1528 first_clone->prev_clone = NULL;
1529 cgraph_insert_node_to_hashtable (first_clone);
1530 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1531
1532 /* Copy the function body of NODE to the first clone. */
1533 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1534
1535 DECL_EXTERNAL (first_clone->decl) = 0;
1536 DECL_ONE_ONLY (first_clone->decl) = 0;
1537 TREE_PUBLIC (first_clone->decl) = 0;
1538 DECL_COMDAT (first_clone->decl) = 0;
1539
1540 for (node = first_clone->next_clone; node; node = node->next_clone)
1541 node->decl = first_clone->decl;
1542 #ifdef ENABLE_CHECKING
1543 verify_cgraph_node (first_clone);
1544 #endif
1545 return first_clone;
1546 }