1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23    as a few basic interprocedural optimizers.
24
25    The main scope of this file is to act as an interface between the
26    tree-based front ends and the back end (and middle end).
27
28    The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32    This function is called once the front end has parsed the whole function
33    body and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40    This function has the same behavior as the above but is used for static
41    variables.
42
43 - cgraph_finalize_compilation_unit
44
45    This function is called once the (source level) compilation unit is
46    finalized and it will no longer change.
47
48    In unit-at-a-time mode the call-graph construction and local function
49    analysis take place here.  Bodies of unreachable functions are released
50    to conserve memory usage.
51
52    The function can be called multiple times when multiple source level
53    compilation units are combined (such as in the C front end).
54
55 - cgraph_optimize
56
57    In unit-at-a-time compilation the intraprocedural analysis takes place
58    here.  In particular, static functions whose address is never taken are
59    marked as local.  The back end can then use this information to modify
60    calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65    When a function or variable is referenced in some hidden way, the
66    call-graph data structure must be updated accordingly by this function.
67    There should be little need to call this function, and all references
68    should be made explicit to the cgraph code.  At present these functions
69    are used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73    This function is responsible for lowering tree nodes not understood by
74    generic code into understandable ones, or alternatively for marking the
75    callgraph and varpool nodes referenced by them as needed.
76
77    ??? On tree-ssa, genericizing should take place here and we would avoid
78    the need for these hooks (replacing them by a genericizing hook).
79
80 - expand_function callback
81
82    This function is used to expand a function and pass it to the RTL back end.
83    The front end should not make any assumptions about when this function can
84    be called.  In particular cgraph_assemble_pending_functions,
85    varpool_assemble_pending_variables, cgraph_finalize_function,
86    varpool_finalize_function and cgraph_optimize can cause arbitrary
87    previously finalized functions to be expanded.
88
89 We implement two compilation modes.
90
91      - unit-at-a-time:  In this mode analysis of all functions is deferred
92        to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too
98 and variables found to be needed output to the assembly file. Via
99 mark_referenced call in assemble_variable functions referenced by
100 static variables are noticed too.
101
102       The intra-procedural information is produced and its existence is
103       indicated by global_info_ready.  Once this flag is set it is impossible
104       to change a function from !reachable to reachable, and thus
105       assemble_variable no longer calls mark_referenced.
106
107 Finally the call-graph is topologically sorted and all reachable functions
108       that have not been completely inlined or are not external are output.
109
110       ??? It is possible that a reference to a function or variable is
111       optimized out.  We cannot deal with this nicely because the topological
112       order is not suitable for it.  For tree-ssa we may consider another
113       pass doing optimization and re-discovering reachable functions.
114
115       ??? Reorganize the code so that variables are output last and only if
116       they really have been referenced by the produced code, so we catch more
117       cases where the reference has been optimized out.
118
119 - non-unit-at-a-time
120
121       All functions and variables are output as early as possible to conserve
122       memory consumption.  This may or may not result in less memory used, but
123       it is still needed for some legacy code that relies on a particular
124       ordering of things output from the compiler.
125
126 Varpool data structures are not used and variables are output directly.
127
128       Functions are output early by a call to
129       cgraph_assemble_pending_functions from cgraph_finalize_function.  The
130       decision on whether a function is needed is made more conservative, so
131       uninlinable static functions are needed too.  During the call-graph
132       construction the edge destinations are not marked as reachable and we
133       rely completely upon assemble_variable to mark them.  */
134
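/* Illustrative sketch, not part of the original sources: one way a front end
   might drive the interface documented above.  The driver function and the
   frontend_parse_next_function helper are hypothetical placeholder names;
   only cgraph_finalize_function, cgraph_finalize_compilation_unit and
   cgraph_optimize are entry points actually provided by the cgraph code.

     static void
     hypothetical_frontend_driver (void)
     {
       tree decl;

       // Hand each completely parsed function body over to the callgraph.
       // The second argument says the call is not nested inside the parsing
       // of another function, so garbage collection may run.
       while ((decl = frontend_parse_next_function ()) != NULL_TREE)
         cgraph_finalize_function (decl, false);

       // Announce that the (source level) compilation unit will no longer
       // change; in unit-at-a-time mode reachable functions are analyzed here.
       cgraph_finalize_compilation_unit ();

       // Run the interprocedural optimizers and expand everything to RTL
       // and assembly.
       cgraph_optimize ();
     }  */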
135
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static void cgraph_output_pending_asms (void);
168
169 static FILE *cgraph_dump_file;
170
171 /* Determine if function DECL is needed. That is, visible to something
172 either outside this translation unit, something magic in the system
173    configury, or (if not doing unit-at-a-time) to something we haven't
174 seen yet. */
175
176 static bool
177 decide_is_function_needed (struct cgraph_node *node, tree decl)
178 {
179 tree origin;
180 if (MAIN_NAME_P (DECL_NAME (decl))
181 && TREE_PUBLIC (decl))
182 {
183 node->local.externally_visible = true;
184 return true;
185 }
186
187 /* If the user told us it is used, then it must be so. */
188 if (node->local.externally_visible)
189 return true;
190
191 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
192 return true;
193
194 /* ??? If the assembler name is set by hand, it is possible to assemble
195 the name later after finalizing the function and the fact is noticed
196 in assemble_name then. This is arguably a bug. */
197 if (DECL_ASSEMBLER_NAME_SET_P (decl)
198 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
199 return true;
200
201 /* If we decided it was needed before, but at the time we didn't have
202 the body of the function available, then it's still needed. We have
203 to go back and re-check its dependencies now. */
204 if (node->needed)
205 return true;
206
207 /* Externally visible functions must be output. The exception is
208 COMDAT functions that must be output only when they are needed.
209
210      When not optimizing, also output the static functions (see
211      PR24561), but don't do so for always_inline functions, functions
212      declared inline and nested functions.  These were optimized out
213 in the original implementation and it is unclear whether we want
214 to change the behavior here. */
215 if (((TREE_PUBLIC (decl)
216 || (!optimize && !node->local.disregard_inline_limits
217 && !DECL_DECLARED_INLINE_P (decl)
218 && !node->origin))
219 && !flag_whole_program)
220 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
221 return true;
222
223 /* Constructors and destructors are reachable from the runtime by
224 some mechanism. */
225 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
226 return true;
227
228 if (flag_unit_at_a_time)
229 return false;
230
231 /* If not doing unit at a time, then we'll only defer this function
232      if it's marked for inlining.  Otherwise we want to emit it now.  */
233
234 /* "extern inline" functions are never output locally. */
235 if (DECL_EXTERNAL (decl))
236 return false;
237   /* Nested functions of an extern inline function shall not be emitted
238      unless we inlined the origin.  */
239 for (origin = decl_function_context (decl); origin;
240 origin = decl_function_context (origin))
241 if (DECL_EXTERNAL (origin))
242 return false;
243 /* We want to emit COMDAT functions only when absolutely necessary. */
244 if (DECL_COMDAT (decl))
245 return false;
246 if (!DECL_INLINE (decl)
247 || (!node->local.disregard_inline_limits
248 /* When declared inline, defer even the uninlinable functions.
249 This allows them to be eliminated when unused. */
250 && !DECL_DECLARED_INLINE_P (decl)
251 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
252 return true;
253
254 return false;
255 }
256
257 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
258    these functions to the callgraph so that they look like ordinary reachable
259    functions already inserted into the callgraph at construction time.  */
260
261 bool
262 cgraph_process_new_functions (void)
263 {
264 bool output = false;
265 tree fndecl;
266 struct cgraph_node *node;
267
268   /* Note that this queue may grow as it is being processed, as the new
269 functions may generate new ones. */
270 while (cgraph_new_nodes)
271 {
272 node = cgraph_new_nodes;
273 fndecl = node->decl;
274 cgraph_new_nodes = cgraph_new_nodes->next_needed;
275 switch (cgraph_state)
276 {
277 case CGRAPH_STATE_CONSTRUCTION:
278 /* At construction time we just need to finalize function and move
279 it into reachable functions list. */
280
281 node->next_needed = NULL;
282 cgraph_finalize_function (fndecl, false);
283 cgraph_mark_reachable_node (node);
284 output = true;
285 break;
286
287 case CGRAPH_STATE_IPA:
288 case CGRAPH_STATE_IPA_SSA:
289 	  /* When IPA optimization has already started, do all the essential
290 	     transformations that have already been performed on the whole
291 	     cgraph but not on this function.  */
292
293 tree_register_cfg_hooks ();
294 if (!node->analyzed)
295 cgraph_analyze_function (node);
296 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
297 current_function_decl = fndecl;
298 node->local.inlinable = tree_inlinable_function_p (fndecl);
299 node->local.self_insns = estimate_num_insns (fndecl);
300 node->local.disregard_inline_limits
301 = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
302 /* Inlining characteristics are maintained by the
303 cgraph_mark_inline. */
304 node->global.insns = node->local.self_insns;
305 if (flag_really_no_inline && !node->local.disregard_inline_limits)
306 node->local.inlinable = 0;
307 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
308 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
309 /* When not optimizing, be sure we run early local passes anyway
310 to expand OMP. */
311 || !optimize)
312 execute_pass_list (pass_early_local_passes.sub);
313 free_dominance_info (CDI_POST_DOMINATORS);
314 free_dominance_info (CDI_DOMINATORS);
315 pop_cfun ();
316 current_function_decl = NULL;
317 break;
318
319 case CGRAPH_STATE_EXPANSION:
320 /* Functions created during expansion shall be compiled
321 directly. */
322 node->output = 0;
323 cgraph_expand_function (node);
324 break;
325
326 default:
327 gcc_unreachable ();
328 break;
329 }
330 }
331 return output;
332 }
333
334 /* When not doing unit-at-a-time, output all functions enqueued.
335    Return true when such functions were found.  */
336
337 static bool
338 cgraph_assemble_pending_functions (void)
339 {
340 bool output = false;
341
342 if (flag_unit_at_a_time)
343 return false;
344
345 cgraph_output_pending_asms ();
346
347 while (cgraph_nodes_queue)
348 {
349 struct cgraph_node *n = cgraph_nodes_queue;
350
351 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
352 n->next_needed = NULL;
353 if (!n->global.inlined_to
354 && !n->alias
355 && !DECL_EXTERNAL (n->decl))
356 {
357 cgraph_expand_function (n);
358 output = true;
359 }
360 output |= cgraph_process_new_functions ();
361 }
362
363 return output;
364 }
365
366
367 /* As a GCC extension we allow redefinition of a function.  The
368    semantics when the two bodies differ are not well defined.
369    We replace the old body with the new body, so in unit-at-a-time mode
370    we always use the new body, while in normal mode we may end up with the
371    old body inlined into some functions and the new body expanded and
372    inlined in others.
373
374    ??? It may make more sense to use one body for inlining and the other
375    body for expanding the function, but this is difficult to do.  */
376
377 static void
378 cgraph_reset_node (struct cgraph_node *node)
379 {
380 /* If node->output is set, then this is a unit-at-a-time compilation
381 and we have already begun whole-unit analysis. This is *not*
382 testing for whether we've already emitted the function. That
383 case can be sort-of legitimately seen with real function
384 redefinition errors. I would argue that the front end should
385 never present us with such a case, but don't enforce that for now. */
386 gcc_assert (!node->output);
387
388 /* Reset our data structures so we can analyze the function again. */
389 memset (&node->local, 0, sizeof (node->local));
390 memset (&node->global, 0, sizeof (node->global));
391 memset (&node->rtl, 0, sizeof (node->rtl));
392 node->analyzed = false;
393 node->local.redefined_extern_inline = true;
394 node->local.finalized = false;
395
396 if (!flag_unit_at_a_time)
397 {
398 struct cgraph_node *n, *next;
399
400 for (n = cgraph_nodes; n; n = next)
401 {
402 next = n->next;
403 if (n->global.inlined_to == node)
404 cgraph_remove_node (n);
405 }
406 }
407
408 cgraph_node_remove_callees (node);
409
410 /* We may need to re-queue the node for assembling in case
411      we already processed it and ignored it as not needed.  */
412 if (node->reachable && !flag_unit_at_a_time)
413 {
414 struct cgraph_node *n;
415
416 for (n = cgraph_nodes_queue; n; n = n->next_needed)
417 if (n == node)
418 break;
419 if (!n)
420 node->reachable = 0;
421 }
422 }
423
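/* Run the lowering passes on the body of NODE's function unless it has
   already been lowered.  */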
424 static void
425 cgraph_lower_function (struct cgraph_node *node)
426 {
427 if (node->lowered)
428 return;
429 tree_lowering_passes (node->decl);
430 node->lowered = true;
431 }
432
433 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
434 logic in effect. If NESTED is true, then our caller cannot stand to have
435 the garbage collector run at the moment. We would need to either create
436 a new GC context, or just not compile right now. */
437
438 void
439 cgraph_finalize_function (tree decl, bool nested)
440 {
441 struct cgraph_node *node = cgraph_node (decl);
442
443 if (node->local.finalized)
444 cgraph_reset_node (node);
445
446 notice_global_symbol (decl);
447 node->decl = decl;
448 node->local.finalized = true;
449 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
450 if (node->nested)
451 lower_nested_functions (decl);
452 gcc_assert (!node->nested);
453
454 /* If not unit at a time, then we need to create the call graph
455 now, so that called functions can be queued and emitted now. */
456 if (!flag_unit_at_a_time)
457 {
458 cgraph_analyze_function (node);
459 cgraph_decide_inlining_incrementally (node, false);
460 }
461
462 if (decide_is_function_needed (node, decl))
463 cgraph_mark_needed_node (node);
464
465 /* Since we reclaim unreachable nodes at the end of every language
466 level unit, we need to be conservative about possible entry points
467 there. */
468 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
469 cgraph_mark_reachable_node (node);
470
471 /* If not unit at a time, go ahead and emit everything we've found
472 to be reachable at this time. */
473 if (!nested)
474 {
475 if (!cgraph_assemble_pending_functions ())
476 ggc_collect ();
477 }
478
479 /* If we've not yet emitted decl, tell the debug info about it. */
480 if (!TREE_ASM_WRITTEN (decl))
481 (*debug_hooks->deferred_inline_function) (decl);
482
483 /* Possibly warn about unused parameters. */
484 if (warn_unused_parameter)
485 do_warn_unused_parameter (decl);
486 }
487
488 /* Verify consistency of the given cgraph node.  */
489 void
490 verify_cgraph_node (struct cgraph_node *node)
491 {
492 struct cgraph_edge *e;
493 struct cgraph_node *main_clone;
494 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
495 basic_block this_block;
496 block_stmt_iterator bsi;
497 bool error_found = false;
498
499 if (errorcount || sorrycount)
500 return;
501
502 timevar_push (TV_CGRAPH_VERIFY);
503 for (e = node->callees; e; e = e->next_callee)
504 if (e->aux)
505 {
506 error ("aux field set for edge %s->%s",
507 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
508 error_found = true;
509 }
510 if (node->count < 0)
511 {
512 error ("Execution count is negative");
513 error_found = true;
514 }
515 for (e = node->callers; e; e = e->next_caller)
516 {
517 if (e->count < 0)
518 {
519 error ("caller edge count is negative");
520 error_found = true;
521 }
522 if (!e->inline_failed)
523 {
524 if (node->global.inlined_to
525 != (e->caller->global.inlined_to
526 ? e->caller->global.inlined_to : e->caller))
527 {
528 error ("inlined_to pointer is wrong");
529 error_found = true;
530 }
531 if (node->callers->next_caller)
532 {
533 error ("multiple inline callers");
534 error_found = true;
535 }
536 }
537 else
538 if (node->global.inlined_to)
539 {
540 error ("inlined_to pointer set for noninline callers");
541 error_found = true;
542 }
543 }
544 if (!node->callers && node->global.inlined_to)
545 {
546 error ("inlined_to pointer is set but no predecessors found");
547 error_found = true;
548 }
549 if (node->global.inlined_to == node)
550 {
551 error ("inlined_to pointer refers to itself");
552 error_found = true;
553 }
554
555 for (main_clone = cgraph_node (node->decl); main_clone;
556 main_clone = main_clone->next_clone)
557 if (main_clone == node)
558 break;
559 if (!cgraph_node (node->decl))
560 {
561 error ("node not found in cgraph_hash");
562 error_found = true;
563 }
564
565 if (node->analyzed
566 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
567 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
568 {
569 if (this_cfun->cfg)
570 {
571 /* The nodes we're interested in are never shared, so walk
572 the tree ignoring duplicates. */
573 struct pointer_set_t *visited_nodes = pointer_set_create ();
574 /* Reach the trees by walking over the CFG, and note the
575 enclosing basic-blocks in the call edges. */
576 FOR_EACH_BB_FN (this_block, this_cfun)
577 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
578 {
579 tree stmt = bsi_stmt (bsi);
580 tree call = get_call_expr_in (stmt);
581 tree decl;
582 if (call && (decl = get_callee_fndecl (call)))
583 {
584 struct cgraph_edge *e = cgraph_edge (node, stmt);
585 if (e)
586 {
587 if (e->aux)
588 {
589 error ("shared call_stmt:");
590 debug_generic_stmt (stmt);
591 error_found = true;
592 }
593 if (e->callee->decl != cgraph_node (decl)->decl
594 && e->inline_failed)
595 {
596 error ("edge points to wrong declaration:");
597 debug_tree (e->callee->decl);
598 fprintf (stderr," Instead of:");
599 debug_tree (decl);
600 }
601 e->aux = (void *)1;
602 }
603 else
604 {
605 error ("missing callgraph edge for call stmt:");
606 debug_generic_stmt (stmt);
607 error_found = true;
608 }
609 }
610 }
611 pointer_set_destroy (visited_nodes);
612 }
613 else
614 /* No CFG available?! */
615 gcc_unreachable ();
616
617 for (e = node->callees; e; e = e->next_callee)
618 {
619 if (!e->aux)
620 {
621 error ("edge %s->%s has no corresponding call_stmt",
622 cgraph_node_name (e->caller),
623 cgraph_node_name (e->callee));
624 debug_generic_stmt (e->call_stmt);
625 error_found = true;
626 }
627 e->aux = 0;
628 }
629 }
630 if (error_found)
631 {
632 dump_cgraph_node (stderr, node);
633 internal_error ("verify_cgraph_node failed");
634 }
635 timevar_pop (TV_CGRAPH_VERIFY);
636 }
637
638 /* Verify whole cgraph structure. */
639 void
640 verify_cgraph (void)
641 {
642 struct cgraph_node *node;
643
644 if (sorrycount || errorcount)
645 return;
646
647 for (node = cgraph_nodes; node; node = node->next)
648 verify_cgraph_node (node);
649 }
650
651 /* Output all asm statements we have stored up to be output. */
652
653 static void
654 cgraph_output_pending_asms (void)
655 {
656 struct cgraph_asm_node *can;
657
658 if (errorcount || sorrycount)
659 return;
660
661 for (can = cgraph_asm_nodes; can; can = can->next)
662 assemble_asm (can->asm_str);
663 cgraph_asm_nodes = NULL;
664 }
665
666 /* Analyze the function scheduled to be output. */
667 void
668 cgraph_analyze_function (struct cgraph_node *node)
669 {
670 tree decl = node->decl;
671
672 current_function_decl = decl;
673 push_cfun (DECL_STRUCT_FUNCTION (decl));
674 cgraph_lower_function (node);
675
676 node->local.estimated_self_stack_size = estimated_stack_frame_size ();
677 node->global.estimated_stack_size = node->local.estimated_self_stack_size;
678 node->global.stack_frame_offset = 0;
679 node->local.inlinable = tree_inlinable_function_p (decl);
680 if (!flag_unit_at_a_time)
681 node->local.self_insns = estimate_num_insns (decl);
682 if (node->local.inlinable)
683 node->local.disregard_inline_limits
684 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
685 if (flag_really_no_inline && !node->local.disregard_inline_limits)
686 node->local.inlinable = 0;
687 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
688 node->global.insns = node->local.self_insns;
689 if (!flag_unit_at_a_time)
690 {
691 bitmap_obstack_initialize (NULL);
692 tree_register_cfg_hooks ();
693 execute_pass_list (pass_early_local_passes.sub);
694 free_dominance_info (CDI_POST_DOMINATORS);
695 free_dominance_info (CDI_DOMINATORS);
696 bitmap_obstack_release (NULL);
697 }
698
699 node->analyzed = true;
700 pop_cfun ();
701 current_function_decl = NULL;
702 }
703
704 /* Look for externally_visible and used attributes and mark cgraph nodes
705 accordingly.
706
707 We cannot mark the nodes at the point the attributes are processed (in
708 handle_*_attribute) because the copy of the declarations available at that
709 point may not be canonical. For example, in:
710
711 void f();
712 void f() __attribute__((used));
713
714 the declaration we see in handle_used_attribute will be the second
715 declaration -- but the front end will subsequently merge that declaration
716 with the original declaration and discard the second declaration.
717
718 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
719
720 void f() {}
721 void f() __attribute__((externally_visible));
722
723 is valid.
724
725 So, we walk the nodes at the end of the translation unit, applying the
726 attributes at that point. */
727
728 static void
729 process_function_and_variable_attributes (struct cgraph_node *first,
730 struct varpool_node *first_var)
731 {
732 struct cgraph_node *node;
733 struct varpool_node *vnode;
734
735 for (node = cgraph_nodes; node != first; node = node->next)
736 {
737 tree decl = node->decl;
738 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
739 {
740 mark_decl_referenced (decl);
741 if (node->local.finalized)
742 cgraph_mark_needed_node (node);
743 }
744 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
745 {
746 if (! TREE_PUBLIC (node->decl))
747 warning (OPT_Wattributes,
748 		     "%J%<externally_visible%> attribute has effect only on public objects",
749 node->decl);
750 else
751 {
752 if (node->local.finalized)
753 cgraph_mark_needed_node (node);
754 node->local.externally_visible = true;
755 }
756 }
757 }
758 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
759 {
760 tree decl = vnode->decl;
761 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
762 {
763 mark_decl_referenced (decl);
764 if (vnode->finalized)
765 varpool_mark_needed_node (vnode);
766 }
767 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
768 {
769 if (! TREE_PUBLIC (vnode->decl))
770 warning (OPT_Wattributes,
771 		     "%J%<externally_visible%> attribute has effect only on public objects",
772 vnode->decl);
773 else
774 {
775 if (vnode->finalized)
776 varpool_mark_needed_node (vnode);
777 vnode->externally_visible = true;
778 }
779 }
780 }
781 }
782
783 /* Analyze the whole compilation unit once it is parsed completely. */
784
785 void
786 cgraph_finalize_compilation_unit (void)
787 {
788 struct cgraph_node *node, *next;
789 /* Keep track of already processed nodes when called multiple times for
790 intermodule optimization. */
791 static struct cgraph_node *first_analyzed;
792 struct cgraph_node *first_processed = first_analyzed;
793 static struct varpool_node *first_analyzed_var;
794
795 if (errorcount || sorrycount)
796 return;
797
798 finish_aliases_1 ();
799
800 if (!flag_unit_at_a_time)
801 {
802 cgraph_output_pending_asms ();
803 cgraph_assemble_pending_functions ();
804 varpool_output_debug_info ();
805 return;
806 }
807
808 if (!quiet_flag)
809 {
810 fprintf (stderr, "\nAnalyzing compilation unit\n");
811 fflush (stderr);
812 }
813
814 timevar_push (TV_CGRAPH);
815 process_function_and_variable_attributes (first_processed,
816 first_analyzed_var);
817 first_processed = cgraph_nodes;
818 first_analyzed_var = varpool_nodes;
819 varpool_analyze_pending_decls ();
820 if (cgraph_dump_file)
821 {
822 fprintf (cgraph_dump_file, "Initial entry points:");
823 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
824 if (node->needed && DECL_SAVED_TREE (node->decl))
825 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
826 fprintf (cgraph_dump_file, "\n");
827 }
828
829 /* Propagate reachability flag and lower representation of all reachable
830 functions. In the future, lowering will introduce new functions and
831 new entry points on the way (by template instantiation and virtual
832 method table generation for instance). */
833 while (cgraph_nodes_queue)
834 {
835 struct cgraph_edge *edge;
836 tree decl = cgraph_nodes_queue->decl;
837
838 node = cgraph_nodes_queue;
839 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
840 node->next_needed = NULL;
841
842       /* ??? It is possible to create an extern inline function and later use
843 	 the weak alias attribute to kill its body.  See
844 gcc.c-torture/compile/20011119-1.c */
845 if (!DECL_SAVED_TREE (decl))
846 {
847 cgraph_reset_node (node);
848 continue;
849 }
850
851 gcc_assert (!node->analyzed && node->reachable);
852 gcc_assert (DECL_SAVED_TREE (decl));
853
854 cgraph_analyze_function (node);
855
856 for (edge = node->callees; edge; edge = edge->next_callee)
857 if (!edge->callee->reachable)
858 cgraph_mark_reachable_node (edge->callee);
859
860       /* We finalize local static variables while constructing callgraph
861 edges. Process their attributes too. */
862 process_function_and_variable_attributes (first_processed,
863 first_analyzed_var);
864 first_processed = cgraph_nodes;
865 first_analyzed_var = varpool_nodes;
866 varpool_analyze_pending_decls ();
867 }
868
869 /* Collect entry points to the unit. */
870 if (cgraph_dump_file)
871 {
872 fprintf (cgraph_dump_file, "Unit entry points:");
873 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
874 if (node->needed && DECL_SAVED_TREE (node->decl))
875 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
876 fprintf (cgraph_dump_file, "\n\nInitial ");
877 dump_cgraph (cgraph_dump_file);
878 }
879
880 if (cgraph_dump_file)
881 fprintf (cgraph_dump_file, "\nReclaiming functions:");
882
883 for (node = cgraph_nodes; node != first_analyzed; node = next)
884 {
885 tree decl = node->decl;
886 next = node->next;
887
888 if (node->local.finalized && !DECL_SAVED_TREE (decl))
889 cgraph_reset_node (node);
890
891 if (!node->reachable && DECL_SAVED_TREE (decl))
892 {
893 if (cgraph_dump_file)
894 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
895 cgraph_remove_node (node);
896 continue;
897 }
898 else
899 node->next_needed = NULL;
900 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
901 gcc_assert (node->analyzed == node->local.finalized);
902 }
903 if (cgraph_dump_file)
904 {
905 fprintf (cgraph_dump_file, "\n\nReclaimed ");
906 dump_cgraph (cgraph_dump_file);
907 }
908 first_analyzed = cgraph_nodes;
909 ggc_collect ();
910 timevar_pop (TV_CGRAPH);
911 }
912 /* Figure out what functions we want to assemble. */
913
914 static void
915 cgraph_mark_functions_to_output (void)
916 {
917 struct cgraph_node *node;
918
919 for (node = cgraph_nodes; node; node = node->next)
920 {
921 tree decl = node->decl;
922 struct cgraph_edge *e;
923
924 gcc_assert (!node->output);
925
926 for (e = node->callers; e; e = e->next_caller)
927 if (e->inline_failed)
928 break;
929
930 /* We need to output all local functions that are used and not
931 always inlined, as well as those that are reachable from
932 outside the current compilation unit. */
933 if (DECL_SAVED_TREE (decl)
934 && !node->global.inlined_to
935 && (node->needed
936 || (e && node->reachable))
937 && !TREE_ASM_WRITTEN (decl)
938 && !DECL_EXTERNAL (decl))
939 node->output = 1;
940 else
941 {
942 /* We should've reclaimed all functions that are not needed. */
943 #ifdef ENABLE_CHECKING
944 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
945 && !DECL_EXTERNAL (decl))
946 {
947 dump_cgraph_node (stderr, node);
948 internal_error ("failed to reclaim unneeded function");
949 }
950 #endif
951 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
952 || DECL_EXTERNAL (decl));
953
954 }
955
956 }
957 }
958
959 /* Expand function specified by NODE. */
960
961 static void
962 cgraph_expand_function (struct cgraph_node *node)
963 {
964 tree decl = node->decl;
965
966 /* We ought to not compile any inline clones. */
967 gcc_assert (!node->global.inlined_to);
968
969 if (flag_unit_at_a_time)
970 announce_function (decl);
971
972 gcc_assert (node->lowered);
973 /*cgraph_lower_function (node);*/
974
975 /* Generate RTL for the body of DECL. */
976 lang_hooks.callgraph.expand_function (decl);
977
978 /* Make sure that BE didn't give up on compiling. */
979 /* ??? Can happen with nested function of extern inline. */
980 gcc_assert (TREE_ASM_WRITTEN (node->decl));
981
982 current_function_decl = NULL;
983 if (!cgraph_preserve_function_body_p (node->decl))
984 {
985 cgraph_release_function_body (node);
986 /* Eliminate all call edges. This is important so the call_expr no longer
987 points to the dead function body. */
988 cgraph_node_remove_callees (node);
989 }
990
991 cgraph_function_flags_ready = true;
992 }
993
994 /* Return true when the callee of edge E should be inlined into its caller.  */
995
996 bool
997 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
998 {
999 *reason = e->inline_failed;
1000 return !e->inline_failed;
1001 }
1002
1003
1004
1005 /* Expand all functions that must be output.
1006
1007    Attempt to topologically sort the nodes so that a function is output when
1008    all the functions it calls are already assembled, to allow data to be
1009    propagated across the callgraph.  Use a stack to get a smaller distance
1010 between a function and its callees (later we may choose to use a more
1011 sophisticated algorithm for function reordering; we will likely want
1012 to use subsections to make the output functions appear in top-down
1013 order). */
1014
1015 static void
1016 cgraph_expand_all_functions (void)
1017 {
1018 struct cgraph_node *node;
1019 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1020 int order_pos = 0, new_order_pos = 0;
1021 int i;
1022
1023 order_pos = cgraph_postorder (order);
1024 gcc_assert (order_pos == cgraph_n_nodes);
1025
1026 /* Garbage collector may remove inline clones we eliminate during
1027 optimization. So we must be sure to not reference them. */
1028 for (i = 0; i < order_pos; i++)
1029 if (order[i]->output)
1030 order[new_order_pos++] = order[i];
1031
1032 for (i = new_order_pos - 1; i >= 0; i--)
1033 {
1034 node = order[i];
1035 if (node->output)
1036 {
1037 gcc_assert (node->reachable);
1038 node->output = 0;
1039 cgraph_expand_function (node);
1040 }
1041 }
1042 cgraph_process_new_functions ();
1043
1044 free (order);
1045
1046 }
1047
1048 /* This is used to sort the node types by the cgraph order number. */
1049
1050 struct cgraph_order_sort
1051 {
1052 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1053 union
1054 {
1055 struct cgraph_node *f;
1056 struct varpool_node *v;
1057 struct cgraph_asm_node *a;
1058 } u;
1059 };
1060
1061 /* Output all functions, variables, and asm statements in the order
1062 according to their order fields, which is the order in which they
1063 appeared in the file. This implements -fno-toplevel-reorder. In
1064 this mode we may output functions and variables which don't really
1065 need to be output. */
1066
1067 static void
1068 cgraph_output_in_order (void)
1069 {
1070 int max;
1071 size_t size;
1072 struct cgraph_order_sort *nodes;
1073 int i;
1074 struct cgraph_node *pf;
1075 struct varpool_node *pv;
1076 struct cgraph_asm_node *pa;
1077
1078 max = cgraph_order;
1079 size = max * sizeof (struct cgraph_order_sort);
1080 nodes = (struct cgraph_order_sort *) alloca (size);
1081 memset (nodes, 0, size);
1082
1083 varpool_analyze_pending_decls ();
1084
1085 for (pf = cgraph_nodes; pf; pf = pf->next)
1086 {
1087 if (pf->output)
1088 {
1089 i = pf->order;
1090 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1091 nodes[i].kind = ORDER_FUNCTION;
1092 nodes[i].u.f = pf;
1093 }
1094 }
1095
1096 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1097 {
1098 i = pv->order;
1099 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1100 nodes[i].kind = ORDER_VAR;
1101 nodes[i].u.v = pv;
1102 }
1103
1104 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1105 {
1106 i = pa->order;
1107 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1108 nodes[i].kind = ORDER_ASM;
1109 nodes[i].u.a = pa;
1110 }
1111
1112 for (i = 0; i < max; ++i)
1113 {
1114 switch (nodes[i].kind)
1115 {
1116 case ORDER_FUNCTION:
1117 nodes[i].u.f->output = 0;
1118 cgraph_expand_function (nodes[i].u.f);
1119 break;
1120
1121 case ORDER_VAR:
1122 varpool_assemble_decl (nodes[i].u.v);
1123 break;
1124
1125 case ORDER_ASM:
1126 assemble_asm (nodes[i].u.a->asm_str);
1127 break;
1128
1129 case ORDER_UNDEFINED:
1130 break;
1131
1132 default:
1133 gcc_unreachable ();
1134 }
1135 }
1136
1137 cgraph_asm_nodes = NULL;
1138 }
1139
1140 /* Return true when function body of DECL still needs to be kept around
1141 for later re-use. */
1142 bool
1143 cgraph_preserve_function_body_p (tree decl)
1144 {
1145 struct cgraph_node *node;
1146 if (!cgraph_global_info_ready)
1147 return (flag_really_no_inline
1148 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1149 : DECL_INLINE (decl));
1150 /* Look if there is any clone around. */
1151 for (node = cgraph_node (decl); node; node = node->next_clone)
1152 if (node->global.inlined_to)
1153 return true;
1154 return false;
1155 }
1156
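/* Run the IPA passes over the whole callgraph.  */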
1157 static void
1158 ipa_passes (void)
1159 {
1160 cfun = NULL;
1161 current_function_decl = NULL;
1162 tree_register_cfg_hooks ();
1163 bitmap_obstack_initialize (NULL);
1164 execute_ipa_pass_list (all_ipa_passes);
1165 bitmap_obstack_release (NULL);
1166 }
1167
1168 /* Perform simple optimizations based on callgraph. */
1169
1170 void
1171 cgraph_optimize (void)
1172 {
1173 if (errorcount || sorrycount)
1174 return;
1175
1176 #ifdef ENABLE_CHECKING
1177 verify_cgraph ();
1178 #endif
1179 if (!flag_unit_at_a_time)
1180 {
1181 cgraph_assemble_pending_functions ();
1182 cgraph_process_new_functions ();
1183 cgraph_state = CGRAPH_STATE_FINISHED;
1184 cgraph_output_pending_asms ();
1185 varpool_assemble_pending_decls ();
1186 varpool_output_debug_info ();
1187 return;
1188 }
1189
1190   /* The front end may output common variables after the unit is finalized.
1191 It is safe to deal with them here as they are always zero initialized. */
1192 varpool_analyze_pending_decls ();
1193 cgraph_process_new_functions ();
1194
1195 timevar_push (TV_CGRAPHOPT);
1196 if (pre_ipa_mem_report)
1197 {
1198 fprintf (stderr, "Memory consumption before IPA\n");
1199 dump_memory_report (false);
1200 }
1201 if (!quiet_flag)
1202 fprintf (stderr, "Performing interprocedural optimizations\n");
1203 cgraph_state = CGRAPH_STATE_IPA;
1204
1205 /* Don't run the IPA passes if there was any error or sorry messages. */
1206 if (errorcount == 0 && sorrycount == 0)
1207 ipa_passes ();
1208
1209   /* This pass removes the bodies of extern inline functions we never inlined.
1210 Do this later so other IPA passes see what is really going on. */
1211 cgraph_remove_unreachable_nodes (false, dump_file);
1212 cgraph_global_info_ready = true;
1213 if (cgraph_dump_file)
1214 {
1215 fprintf (cgraph_dump_file, "Optimized ");
1216 dump_cgraph (cgraph_dump_file);
1217 dump_varpool (cgraph_dump_file);
1218 }
1219 if (post_ipa_mem_report)
1220 {
1221 fprintf (stderr, "Memory consumption after IPA\n");
1222 dump_memory_report (false);
1223 }
1224 timevar_pop (TV_CGRAPHOPT);
1225
1226 /* Output everything. */
1227 if (!quiet_flag)
1228 fprintf (stderr, "Assembling functions:\n");
1229 #ifdef ENABLE_CHECKING
1230 verify_cgraph ();
1231 #endif
1232
1233 cgraph_mark_functions_to_output ();
1234
1235 cgraph_state = CGRAPH_STATE_EXPANSION;
1236 if (!flag_toplevel_reorder)
1237 cgraph_output_in_order ();
1238 else
1239 {
1240 cgraph_output_pending_asms ();
1241
1242 cgraph_expand_all_functions ();
1243 varpool_remove_unreferenced_decls ();
1244
1245 varpool_assemble_pending_decls ();
1246 varpool_output_debug_info ();
1247 }
1248 cgraph_process_new_functions ();
1249 cgraph_state = CGRAPH_STATE_FINISHED;
1250
1251 if (cgraph_dump_file)
1252 {
1253 fprintf (cgraph_dump_file, "\nFinal ");
1254 dump_cgraph (cgraph_dump_file);
1255 }
1256 #ifdef ENABLE_CHECKING
1257 verify_cgraph ();
1258 /* Double check that all inline clones are gone and that all
1259 function bodies have been released from memory. */
1260 if (flag_unit_at_a_time
1261 && !(sorrycount || errorcount))
1262 {
1263 struct cgraph_node *node;
1264 bool error_found = false;
1265
1266 for (node = cgraph_nodes; node; node = node->next)
1267 if (node->analyzed
1268 && (node->global.inlined_to
1269 || DECL_SAVED_TREE (node->decl)))
1270 {
1271 error_found = true;
1272 dump_cgraph_node (stderr, node);
1273 }
1274 if (error_found)
1275 internal_error ("nodes with no released memory found");
1276 }
1277 #endif
1278 }
1279 /* Generate and emit a static constructor or destructor. WHICH must be
1280 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1281 GENERIC statements. */
1282
1283 void
1284 cgraph_build_static_cdtor (char which, tree body, int priority)
1285 {
1286 static int counter = 0;
1287 char which_buf[16];
1288 tree decl, name, resdecl;
1289
1290 sprintf (which_buf, "%c_%d", which, counter++);
1291 name = get_file_function_name (which_buf);
1292
1293 decl = build_decl (FUNCTION_DECL, name,
1294 build_function_type (void_type_node, void_list_node));
1295 current_function_decl = decl;
1296
1297 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1298 DECL_ARTIFICIAL (resdecl) = 1;
1299 DECL_IGNORED_P (resdecl) = 1;
1300 DECL_RESULT (decl) = resdecl;
1301
1302 allocate_struct_function (decl);
1303
1304 TREE_STATIC (decl) = 1;
1305 TREE_USED (decl) = 1;
1306 DECL_ARTIFICIAL (decl) = 1;
1307 DECL_IGNORED_P (decl) = 1;
1308 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1309 DECL_SAVED_TREE (decl) = body;
1310 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1311 DECL_UNINLINABLE (decl) = 1;
1312
1313 DECL_INITIAL (decl) = make_node (BLOCK);
1314 TREE_USED (DECL_INITIAL (decl)) = 1;
1315
1316 DECL_SOURCE_LOCATION (decl) = input_location;
1317 cfun->function_end_locus = input_location;
1318
1319 switch (which)
1320 {
1321 case 'I':
1322 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1323 break;
1324 case 'D':
1325 DECL_STATIC_DESTRUCTOR (decl) = 1;
1326 break;
1327 default:
1328 gcc_unreachable ();
1329 }
1330
1331 gimplify_function_tree (decl);
1332
1333 cgraph_add_new_function (decl, false);
1334 cgraph_mark_needed_node (cgraph_node (decl));
1335
1336 if (targetm.have_ctors_dtors)
1337 {
1338 void (*fn) (rtx, int);
1339
1340 if (which == 'I')
1341 fn = targetm.asm_out.constructor;
1342 else
1343 fn = targetm.asm_out.destructor;
1344 fn (XEXP (DECL_RTL (decl), 0), priority);
1345 }
1346 }
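
/* Usage sketch, not part of the original sources: how a caller might emit a
   static constructor that runs a registration routine at program start-up.
   hypothetical_emit_registration_ctor and register_fndecl are made-up names;
   the statement-list building follows the pattern used by existing callers
   such as coverage.c.

     static void
     hypothetical_emit_registration_ctor (tree register_fndecl)
     {
       tree body = NULL_TREE;

       // Build a GENERIC call to the registration routine (no arguments)
       // and chain it onto the statement list forming the constructor body.
       append_to_statement_list (build_function_call_expr (register_fndecl,
                                                           NULL_TREE),
                                 &body);

       // 'I' requests a constructor; 'D' would request a destructor.
       cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
     }  */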
1347
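/* Initialize the callgraph dump file.  */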
1348 void
1349 init_cgraph (void)
1350 {
1351 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1352 }
1353
1354 /* The edges representing the callers of the NEW_VERSION node were
1355    fixed by cgraph_function_versioning (); now the call_expr in their
1356    respective call statements should be updated to call the NEW_VERSION.  */
1357
1358 static void
1359 update_call_expr (struct cgraph_node *new_version)
1360 {
1361 struct cgraph_edge *e;
1362
1363 gcc_assert (new_version);
1364 for (e = new_version->callers; e; e = e->next_caller)
1365 /* Update the call expr on the edges
1366 to call the new version. */
1367 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1368 }
1369
1370
1371 /* Create a new cgraph node which is the new version of
1372    the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
1373    edges which should be redirected to point to
1374    NEW_VERSION.  All the callee edges of OLD_VERSION
1375 are cloned to the new version node. Return the new
1376 version node. */
1377
1378 static struct cgraph_node *
1379 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1380 tree new_decl,
1381 VEC(cgraph_edge_p,heap) *redirect_callers)
1382 {
1383 struct cgraph_node *new_version;
1384 struct cgraph_edge *e, *new_e;
1385 struct cgraph_edge *next_callee;
1386 unsigned i;
1387
1388 gcc_assert (old_version);
1389
1390 new_version = cgraph_node (new_decl);
1391
1392 new_version->analyzed = true;
1393 new_version->local = old_version->local;
1394 new_version->global = old_version->global;
1395    new_version->rtl = old_version->rtl;
1396 new_version->reachable = true;
1397 new_version->count = old_version->count;
1398
1399 /* Clone the old node callees. Recursive calls are
1400 also cloned. */
1401 for (e = old_version->callees;e; e=e->next_callee)
1402 {
1403 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1404 new_e->count = e->count;
1405 }
1406 /* Fix recursive calls.
1407       If OLD_VERSION has a recursive call, then after the
1408       previous edge cloning the new version will have an edge
1409       pointing to the old version, which is wrong;
1410       redirect it to point to the new version.  */
1411 for (e = new_version->callees ; e; e = next_callee)
1412 {
1413 next_callee = e->next_callee;
1414 if (e->callee == old_version)
1415 cgraph_redirect_edge_callee (e, new_version);
1416
1417 if (!next_callee)
1418 break;
1419 }
1420 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1421 {
1422 /* Redirect calls to the old version node to point to its new
1423 version. */
1424 cgraph_redirect_edge_callee (e, new_version);
1425 }
1426
1427 return new_version;
1428 }
1429
1430 /* Perform function versioning.
1431 Function versioning includes copying of the tree and
1432 a callgraph update (creating a new cgraph node and updating
1433 its callees and callers).
1434
1435 REDIRECT_CALLERS varray includes the edges to be redirected
1436 to the new version.
1437
1438 TREE_MAP is a mapping of tree nodes we want to replace with
1439 new ones (according to results of prior analysis).
1440 OLD_VERSION_NODE is the node that is versioned.
1441 It returns the new version's cgraph node. */
1442
1443 struct cgraph_node *
1444 cgraph_function_versioning (struct cgraph_node *old_version_node,
1445 VEC(cgraph_edge_p,heap) *redirect_callers,
1446 varray_type tree_map)
1447 {
1448 tree old_decl = old_version_node->decl;
1449 struct cgraph_node *new_version_node = NULL;
1450 tree new_decl;
1451
1452 if (!tree_versionable_function_p (old_decl))
1453 return NULL;
1454
1455 /* Make a new FUNCTION_DECL tree node for the
1456 new version. */
1457 new_decl = copy_node (old_decl);
1458
1459   /* Create the new version's call-graph node
1460      and update the edges of the new node.  */
1461 new_version_node =
1462 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1463 redirect_callers);
1464
1465 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1466 tree_function_versioning (old_decl, new_decl, tree_map, false);
1467 /* Update the call_expr on the edges to call the new version node. */
1468 update_call_expr (new_version_node);
1469
1470 /* Update the new version's properties.
1471      Make the new version visible only within this translation unit.
1472 ??? We cannot use COMDAT linkage because there is no
1473 ABI support for this. */
1474 DECL_EXTERNAL (new_version_node->decl) = 0;
1475 DECL_ONE_ONLY (new_version_node->decl) = 0;
1476 TREE_PUBLIC (new_version_node->decl) = 0;
1477 DECL_COMDAT (new_version_node->decl) = 0;
1478 new_version_node->local.externally_visible = 0;
1479 new_version_node->local.local = 1;
1480 new_version_node->lowered = true;
1481 return new_version_node;
1482 }
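
/* Usage sketch, not part of the original sources: cloning a function so that
   all of its current callers are redirected to the new, local version while
   the original remains available.  hypothetical_clone_for_all_callers is a
   made-up name; the VEC manipulation follows the conventions used by IPA
   passes that call cgraph_function_versioning.

     static struct cgraph_node *
     hypothetical_clone_for_all_callers (struct cgraph_node *node)
     {
       VEC (cgraph_edge_p, heap) *redirect_callers = NULL;
       struct cgraph_edge *e;
       struct cgraph_node *new_node;

       // Gather every incoming edge; these calls will be redirected so they
       // invoke the new version instead of the original.
       for (e = node->callers; e; e = e->next_caller)
         VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);

       // A NULL tree_map requests no replacements of trees in the body.
       new_node = cgraph_function_versioning (node, redirect_callers, NULL);
       VEC_free (cgraph_edge_p, heap, redirect_callers);
       return new_node;
     }  */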
1483
1484 /* Produce a separate function body for inline clones so the offline copy
1485    can be modified without affecting them.  */
1486 struct cgraph_node *
1487 save_inline_function_body (struct cgraph_node *node)
1488 {
1489 struct cgraph_node *first_clone;
1490
1491 gcc_assert (node == cgraph_node (node->decl));
1492
1493 cgraph_lower_function (node);
1494
1495   /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1496      output to the assembly file.  This clone is pointed to by the inline_decl
1497      of the original function; the inlining infrastructure knows how to deal with it.  */
1498 if (!flag_unit_at_a_time)
1499 {
1500 struct cgraph_edge *e;
1501
1502 first_clone = cgraph_clone_node (node, node->count, 0, false);
1503 first_clone->needed = 0;
1504 first_clone->reachable = 1;
1505 /* Recursively clone all bodies. */
1506 for (e = first_clone->callees; e; e = e->next_callee)
1507 if (!e->inline_failed)
1508 cgraph_clone_inlined_nodes (e, true, false);
1509 }
1510 else
1511 first_clone = node->next_clone;
1512
1513 first_clone->decl = copy_node (node->decl);
1514 node->next_clone = NULL;
1515 if (!flag_unit_at_a_time)
1516 node->inline_decl = first_clone->decl;
1517 first_clone->prev_clone = NULL;
1518 cgraph_insert_node_to_hashtable (first_clone);
1519 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1520
1521 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1522 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1523
1524 DECL_EXTERNAL (first_clone->decl) = 0;
1525 DECL_ONE_ONLY (first_clone->decl) = 0;
1526 TREE_PUBLIC (first_clone->decl) = 0;
1527 DECL_COMDAT (first_clone->decl) = 0;
1528
1529 for (node = first_clone->next_clone; node; node = node->next_clone)
1530 node->decl = first_clone->decl;
1531 #ifdef ENABLE_CHECKING
1532 verify_cgraph_node (first_clone);
1533 #endif
1534 return first_clone;
1535 }