1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements the main driver of the compilation process as well
23    as a few basic interprocedural optimizers.
24
25    The main purpose of this file is to act as an interface between the
26    tree-based front ends and the back end (and middle end).
27
28    The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32       This function is called once the front end has parsed the whole body of a
33       function and it is certain that neither the body nor the declaration will change.
34
35       (There is one exception needed for implementing GCC extern inline
36       functions.)
37
38 - varpool_finalize_variable
39
40       This function has the same behavior as the above but is used for static
41       variables.
42
43 - cgraph_finalize_compilation_unit
44
45       This function is called once the (source-level) compilation unit is
46       finalized and it will no longer change.
47
48       The call-graph construction and local function analysis take place
49       here.  Bodies of unreachable functions are released to conserve memory
50       usage.
51
52       The function can be called multiple times when multiple source-level
53       compilation units are combined (such as in the C front end).
54
55 - cgraph_optimize
56
57       In this unit-at-a-time compilation the intraprocedural analysis takes
58       place here.  In particular the static functions whose address is never
59       taken are marked as local.  The back end can then use this information to
60       modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65       When a function or variable is referenced in some hidden way, the
66       call-graph data structure must be updated accordingly by this function.
67       There should be little need to call this function and all the references
68       should be made explicit to the cgraph code.  At present these functions
69       are used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73       This function is responsible for lowering tree nodes not understood by
74       generic code into understandable ones, or alternatively for marking
75       callgraph and varpool nodes referenced by them as needed.
76
77       ??? With tree-ssa, genericizing should take place here and we would
78       avoid the need for these hooks (replacing them by a genericizing hook).
79
80    Analysis of all functions is deferred
81    to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83    In cgraph_finalize_compilation_unit the reachable functions are
84    analyzed.  During analysis the call-graph edges from reachable
85    functions are constructed and their destinations are marked as
86    reachable.  References to functions and variables are discovered too,
87    and variables found to be needed are output to the assembly file.  Via
88    the mark_referenced call in assemble_variable, functions referenced by
89    static variables are noticed too.
90
91    The intraprocedural information is produced and its existence is
92    indicated by global_info_ready.  Once this flag is set it is impossible
93    to change a function from !reachable to reachable, and thus
94    assemble_variable no longer calls mark_referenced.
95
96    Finally the call-graph is topologically sorted and all reachable functions
97    that have not been completely inlined and are not external are output.
98
99    ??? It is possible that a reference to a function or variable is optimized
100    out.  We cannot deal with this nicely because the topological order is not
101    suitable for it.  For tree-ssa we may consider another pass doing
102    optimization and re-discovering reachable functions.
103
104    ??? Reorganize the code so variables are output very last and only if they
105    really have been referenced by the produced code, so we catch more cases
106    where the reference has been optimized out.  */
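
/* Illustrative sketch only (not taken from any particular front end): the
   call sequence described above roughly amounts to

       for each parsed function FNDECL:
	 cgraph_finalize_function (FNDECL, false);
       for each parsed static variable VARDECL:
	 varpool_finalize_variable (VARDECL);
       cgraph_finalize_compilation_unit ();
       cgraph_optimize ();

   with cgraph_mark_needed_node / varpool_mark_needed_node called in addition
   for references the callgraph code cannot see by itself.  */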
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "c-common.h"
130 #include "intl.h"
131 #include "function.h"
132 #include "ipa-prop.h"
133 #include "tree-gimple.h"
134 #include "tree-pass.h"
135 #include "output.h"
136
137 static void cgraph_expand_all_functions (void);
138 static void cgraph_mark_functions_to_output (void);
139 static void cgraph_expand_function (struct cgraph_node *);
140 static void cgraph_output_pending_asms (void);
141
142 static FILE *cgraph_dump_file;
143
144 /* A vector of FUNCTION_DECLs declared as static constructors. */
145 static GTY (()) VEC(tree, gc) *static_ctors;
146 /* A vector of FUNCTION_DECLs declared as static destructors. */
147 static GTY (()) VEC(tree, gc) *static_dtors;
148
149 /* When the target does not have ctors and dtors, we call all constructors
150    and destructors through a special initialization/destruction function
151    recognized by collect2.
152
153    When we are going to build this function, collect all constructors and
154    destructors and turn them into normal functions.  */
155
156 static void
157 record_cdtor_fn (tree fndecl)
158 {
159 struct cgraph_node *node;
160 if (targetm.have_ctors_dtors
161 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
162 && !DECL_STATIC_DESTRUCTOR (fndecl)))
163 return;
164
165 if (DECL_STATIC_CONSTRUCTOR (fndecl))
166 {
167 VEC_safe_push (tree, gc, static_ctors, fndecl);
168 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
169 }
170 if (DECL_STATIC_DESTRUCTOR (fndecl))
171 {
172 VEC_safe_push (tree, gc, static_dtors, fndecl);
173 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
174 }
175 node = cgraph_node (fndecl);
176 node->local.disregard_inline_limits = 1;
177 cgraph_mark_reachable_node (node);
178 }
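
/* Example (an illustration, not code from this file): on a target without
   native ctor/dtor support, a function declared as

     static void __attribute__ ((constructor)) init_foo (void);

   reaches record_cdtor_fn with DECL_STATIC_CONSTRUCTOR set; it is pushed
   onto STATIC_CTORS, the flag is cleared, and cgraph_build_cdtor_fns later
   wraps the call to it in a collect2-visible constructor.  */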
179
180 /* Define global constructor/destructor functions for the CDTORS, of
181    which there are LEN.  The CDTORS are sorted by initialization
182    priority.  If CTOR_P is true, these are constructors; otherwise,
183    they are destructors.  */
184
185 static void
186 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
187 {
188 size_t i;
189
190 i = 0;
191 while (i < len)
192 {
193 tree body;
194 tree fn;
195 priority_type priority;
196
197 priority = 0;
198 body = NULL_TREE;
199 /* Find the next batch of constructors/destructors with the same
200 initialization priority. */
201 do
202 {
203 priority_type p;
204 fn = cdtors[i];
205 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
206 if (!body)
207 priority = p;
208 else if (p != priority)
209 break;
210 append_to_statement_list (build_function_call_expr (fn, 0),
211 &body);
212 ++i;
213 }
214 while (i < len);
215 gcc_assert (body != NULL_TREE);
216       /* Generate a function to call all the functions of like
217          priority.  */
218 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
219 }
220 }
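
/* For example, if CDTORS contains { a, b, c } sorted so that a and b have
   initialization priority 101 and c has the default priority 65535, the
   loop above emits two wrappers via cgraph_build_static_cdtor: one of
   priority 101 whose body calls a () and b (), and one of priority 65535
   that calls c ().  (Illustrative values only.)  */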
221
222 /* Comparison function for qsort. P1 and P2 are actually of type
223 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
224 used to determine the sort order. */
225
226 static int
227 compare_ctor (const void *p1, const void *p2)
228 {
229 tree f1;
230 tree f2;
231 int priority1;
232 int priority2;
233
234 f1 = *(const tree *)p1;
235 f2 = *(const tree *)p2;
236 priority1 = DECL_INIT_PRIORITY (f1);
237 priority2 = DECL_INIT_PRIORITY (f2);
238
239 if (priority1 < priority2)
240 return -1;
241 else if (priority1 > priority2)
242 return 1;
243 else
244 /* Ensure a stable sort. */
245 return (const tree *)p1 - (const tree *)p2;
246 }
247
248 /* Comparison function for qsort. P1 and P2 are actually of type
249 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
250 used to determine the sort order. */
251
252 static int
253 compare_dtor (const void *p1, const void *p2)
254 {
255 tree f1;
256 tree f2;
257 int priority1;
258 int priority2;
259
260 f1 = *(const tree *)p1;
261 f2 = *(const tree *)p2;
262 priority1 = DECL_FINI_PRIORITY (f1);
263 priority2 = DECL_FINI_PRIORITY (f2);
264
265 if (priority1 < priority2)
266 return -1;
267 else if (priority1 > priority2)
268 return 1;
269 else
270 /* Ensure a stable sort. */
271 return (const tree *)p1 - (const tree *)p2;
272 }
273
274 /* Generate functions to call static constructors and destructors
275 for targets that do not support .ctors/.dtors sections. These
276 functions have magic names which are detected by collect2. */
277
278 static void
279 cgraph_build_cdtor_fns (void)
280 {
281 if (!VEC_empty (tree, static_ctors))
282 {
283 gcc_assert (!targetm.have_ctors_dtors);
284 qsort (VEC_address (tree, static_ctors),
285 VEC_length (tree, static_ctors),
286 sizeof (tree),
287 compare_ctor);
288 build_cdtor (/*ctor_p=*/true,
289 VEC_address (tree, static_ctors),
290 VEC_length (tree, static_ctors));
291 VEC_truncate (tree, static_ctors, 0);
292 }
293
294 if (!VEC_empty (tree, static_dtors))
295 {
296 gcc_assert (!targetm.have_ctors_dtors);
297 qsort (VEC_address (tree, static_dtors),
298 VEC_length (tree, static_dtors),
299 sizeof (tree),
300 compare_dtor);
301 build_cdtor (/*ctor_p=*/false,
302 VEC_address (tree, static_dtors),
303 VEC_length (tree, static_dtors));
304 VEC_truncate (tree, static_dtors, 0);
305 }
306 }
307
308 /* Determine if function DECL is needed.  That is, visible to something
309    outside this translation unit or to something magic in the system
310    configury.  */
311
312 static bool
313 decide_is_function_needed (struct cgraph_node *node, tree decl)
314 {
315 if (MAIN_NAME_P (DECL_NAME (decl))
316 && TREE_PUBLIC (decl))
317 {
318 node->local.externally_visible = true;
319 return true;
320 }
321
322 /* If the user told us it is used, then it must be so. */
323 if (node->local.externally_visible)
324 return true;
325
326 /* ??? If the assembler name is set by hand, it is possible to assemble
327 the name later after finalizing the function and the fact is noticed
328 in assemble_name then. This is arguably a bug. */
329 if (DECL_ASSEMBLER_NAME_SET_P (decl)
330 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
331 return true;
332
333 /* With -fkeep-inline-functions we are keeping all inline functions except
334 for extern inline ones. */
335 if (flag_keep_inline_functions
336 && DECL_DECLARED_INLINE_P (decl)
337 && !DECL_EXTERNAL (decl)
338 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
339 return true;
340
341 /* If we decided it was needed before, but at the time we didn't have
342 the body of the function available, then it's still needed. We have
343 to go back and re-check its dependencies now. */
344 if (node->needed)
345 return true;
346
347 /* Externally visible functions must be output. The exception is
348 COMDAT functions that must be output only when they are needed.
349
350      When not optimizing, also output the static functions (see
351      PR24561), but don't do so for always_inline functions, functions
352      declared inline and nested functions.  These were optimized out
353      in the original implementation and it is unclear whether we want
354      to change the behavior here.  */
355 if (((TREE_PUBLIC (decl)
356 || (!optimize && !node->local.disregard_inline_limits
357 && !DECL_DECLARED_INLINE_P (decl)
358 && !node->origin))
359 && !flag_whole_program)
360 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
361 return true;
362
363 /* Constructors and destructors are reachable from the runtime by
364 some mechanism. */
365 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
366 return true;
367
368 return false;
369 }
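
/* Informally, the predicate above treats as needed: a public "main", a
   function already marked externally visible, a function whose assembler
   name is already referenced, an inline definition kept because of
   -fkeep-inline-functions, a node that was needed before, a public
   non-COMDAT non-external function outside -fwhole-program (and, when not
   optimizing, most plain static functions), and static constructors and
   destructors.  (Summary only; see the code for the exact conditions.)  */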
370
371 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
372    functions to the callgraph so that they look like ordinary reachable
373    functions inserted into the callgraph already at construction time.  */
374
375 bool
376 cgraph_process_new_functions (void)
377 {
378 bool output = false;
379 tree fndecl;
380 struct cgraph_node *node;
381
382   /* Note that this queue may grow as it is being processed, as the new
383      functions may generate new ones.  */
384 while (cgraph_new_nodes)
385 {
386 node = cgraph_new_nodes;
387 fndecl = node->decl;
388 cgraph_new_nodes = cgraph_new_nodes->next_needed;
389 switch (cgraph_state)
390 {
391 case CGRAPH_STATE_CONSTRUCTION:
392 	  /* At construction time we just need to finalize the function and move
393 	     it into the reachable functions list.  */
394
395 node->next_needed = NULL;
396 cgraph_finalize_function (fndecl, false);
397 cgraph_mark_reachable_node (node);
398 output = true;
399 break;
400
401 case CGRAPH_STATE_IPA:
402 case CGRAPH_STATE_IPA_SSA:
403 	  /* When IPA optimization has already started, do all essential
404 	     transformations that have already been performed on the whole
405 	     cgraph but not on this function.  */
406
407 tree_register_cfg_hooks ();
408 if (!node->analyzed)
409 cgraph_analyze_function (node);
410 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
411 current_function_decl = fndecl;
412 compute_inline_parameters (node);
413 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
414 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
415 /* When not optimizing, be sure we run early local passes anyway
416 to expand OMP. */
417 || !optimize)
418 execute_pass_list (pass_early_local_passes.pass.sub);
419 free_dominance_info (CDI_POST_DOMINATORS);
420 free_dominance_info (CDI_DOMINATORS);
421 pop_cfun ();
422 current_function_decl = NULL;
423 break;
424
425 case CGRAPH_STATE_EXPANSION:
426 /* Functions created during expansion shall be compiled
427 directly. */
428 node->output = 0;
429 cgraph_expand_function (node);
430 break;
431
432 default:
433 gcc_unreachable ();
434 break;
435 }
436 }
437 return output;
438 }
439
440 /* As a GCC extension we allow redefinition of the function.  The
441    semantics when the two bodies differ are not well defined.
442    We replace the old body with the new body, so in unit-at-a-time mode
443    we always use the new body, while in normal mode we may end up with
444    the old body inlined into some functions and the new body expanded and
445    inlined in others.
446
447    ??? It may make more sense to use one body for inlining and the other
448    body for expanding the function, but this is difficult to do.  */
449
450 static void
451 cgraph_reset_node (struct cgraph_node *node)
452 {
453 /* If node->output is set, then we have already begun whole-unit analysis.
454 This is *not* testing for whether we've already emitted the function.
455 That case can be sort-of legitimately seen with real function redefinition
456 errors. I would argue that the front end should never present us with
457 such a case, but don't enforce that for now. */
458 gcc_assert (!node->output);
459
460 /* Reset our data structures so we can analyze the function again. */
461 memset (&node->local, 0, sizeof (node->local));
462 memset (&node->global, 0, sizeof (node->global));
463 memset (&node->rtl, 0, sizeof (node->rtl));
464 node->analyzed = false;
465 node->local.redefined_extern_inline = true;
466 node->local.finalized = false;
467
468 cgraph_node_remove_callees (node);
469
470   /* We may need to re-queue the node for assembling in case
471      we already processed it and ignored it as not needed, or got
472      a re-declaration in IMA mode.  */
473 if (node->reachable)
474 {
475 struct cgraph_node *n;
476
477 for (n = cgraph_nodes_queue; n; n = n->next_needed)
478 if (n == node)
479 break;
480 if (!n)
481 node->reachable = 0;
482 }
483 }
484
485 static void
486 cgraph_lower_function (struct cgraph_node *node)
487 {
488 if (node->lowered)
489 return;
490 tree_lowering_passes (node->decl);
491 node->lowered = true;
492 }
493
494 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
495 logic in effect. If NESTED is true, then our caller cannot stand to have
496 the garbage collector run at the moment. We would need to either create
497 a new GC context, or just not compile right now. */
498
499 void
500 cgraph_finalize_function (tree decl, bool nested)
501 {
502 struct cgraph_node *node = cgraph_node (decl);
503
504 if (node->local.finalized)
505 cgraph_reset_node (node);
506
507 node->pid = cgraph_max_pid ++;
508 notice_global_symbol (decl);
509 node->local.finalized = true;
510 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
511 record_cdtor_fn (node->decl);
512 if (node->nested)
513 lower_nested_functions (decl);
514 gcc_assert (!node->nested);
515
516 if (decide_is_function_needed (node, decl))
517 cgraph_mark_needed_node (node);
518
519 /* Since we reclaim unreachable nodes at the end of every language
520 level unit, we need to be conservative about possible entry points
521 there. */
522 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
523 cgraph_mark_reachable_node (node);
524
525 /* If we've not yet emitted decl, tell the debug info about it. */
526 if (!TREE_ASM_WRITTEN (decl))
527 (*debug_hooks->deferred_inline_function) (decl);
528
529 /* Possibly warn about unused parameters. */
530 if (warn_unused_parameter)
531 do_warn_unused_parameter (decl);
532
533 if (!nested)
534 ggc_collect ();
535 }
536
537 /* The C99 extern inline keyword allows changing a declaration after the
538    function has been finalized.  We then need to re-decide whether we want to
539    mark the function as needed.  */
540
541 void
542 cgraph_mark_if_needed (tree decl)
543 {
544 struct cgraph_node *node = cgraph_node (decl);
545 if (node->local.finalized && decide_is_function_needed (node, decl))
546 cgraph_mark_needed_node (node);
547 }
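
/* For example, in C99 a translation unit may contain

     inline int f (void) { return 0; }

   followed later by

     extern int f (void);

   The second declaration forces an external definition of f to be emitted,
   but f was already finalized when it was seen, so the front end calls
   cgraph_mark_if_needed to re-run decide_is_function_needed.  (Sketch of
   the scenario only; the exact front-end hooks are outside this file.)  */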
548
549 /* Verify the consistency of cgraph node NODE.  */
550 void
551 verify_cgraph_node (struct cgraph_node *node)
552 {
553 struct cgraph_edge *e;
554 struct cgraph_node *main_clone;
555 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
556 struct function *saved_cfun = cfun;
557 basic_block this_block;
558 block_stmt_iterator bsi;
559 bool error_found = false;
560
561 if (errorcount || sorrycount)
562 return;
563
564 timevar_push (TV_CGRAPH_VERIFY);
565 /* debug_generic_stmt needs correct cfun */
566 set_cfun (this_cfun);
567 for (e = node->callees; e; e = e->next_callee)
568 if (e->aux)
569 {
570 error ("aux field set for edge %s->%s",
571 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
572 error_found = true;
573 }
574 if (node->count < 0)
575 {
576 error ("Execution count is negative");
577 error_found = true;
578 }
579 for (e = node->callers; e; e = e->next_caller)
580 {
581 if (e->count < 0)
582 {
583 error ("caller edge count is negative");
584 error_found = true;
585 }
586 if (e->frequency < 0)
587 {
588 error ("caller edge frequency is negative");
589 error_found = true;
590 }
591 if (e->frequency > CGRAPH_FREQ_MAX)
592 {
593 error ("caller edge frequency is too large");
594 error_found = true;
595 }
596 if (!e->inline_failed)
597 {
598 if (node->global.inlined_to
599 != (e->caller->global.inlined_to
600 ? e->caller->global.inlined_to : e->caller))
601 {
602 error ("inlined_to pointer is wrong");
603 error_found = true;
604 }
605 if (node->callers->next_caller)
606 {
607 error ("multiple inline callers");
608 error_found = true;
609 }
610 }
611 else
612 if (node->global.inlined_to)
613 {
614 error ("inlined_to pointer set for noninline callers");
615 error_found = true;
616 }
617 }
618 if (!node->callers && node->global.inlined_to)
619 {
620 error ("inlined_to pointer is set but no predecessors found");
621 error_found = true;
622 }
623 if (node->global.inlined_to == node)
624 {
625 error ("inlined_to pointer refers to itself");
626 error_found = true;
627 }
628
629 for (main_clone = cgraph_node (node->decl); main_clone;
630 main_clone = main_clone->next_clone)
631 if (main_clone == node)
632 break;
633 if (!cgraph_node (node->decl))
634 {
635 error ("node not found in cgraph_hash");
636 error_found = true;
637 }
638
639 if (node->analyzed
640 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
641 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
642 {
643 if (this_cfun->cfg)
644 {
645 /* The nodes we're interested in are never shared, so walk
646 the tree ignoring duplicates. */
647 struct pointer_set_t *visited_nodes = pointer_set_create ();
648 /* Reach the trees by walking over the CFG, and note the
649 enclosing basic-blocks in the call edges. */
650 FOR_EACH_BB_FN (this_block, this_cfun)
651 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
652 {
653 tree stmt = bsi_stmt (bsi);
654 tree call = get_call_expr_in (stmt);
655 tree decl;
656 if (call && (decl = get_callee_fndecl (call)))
657 {
658 struct cgraph_edge *e = cgraph_edge (node, stmt);
659 if (e)
660 {
661 if (e->aux)
662 {
663 error ("shared call_stmt:");
664 debug_generic_stmt (stmt);
665 error_found = true;
666 }
667 if (e->callee->decl != cgraph_node (decl)->decl
668 && e->inline_failed)
669 {
670 error ("edge points to wrong declaration:");
671 debug_tree (e->callee->decl);
672 fprintf (stderr," Instead of:");
673 debug_tree (decl);
674 }
675 e->aux = (void *)1;
676 }
677 else
678 {
679 error ("missing callgraph edge for call stmt:");
680 debug_generic_stmt (stmt);
681 error_found = true;
682 }
683 }
684 }
685 pointer_set_destroy (visited_nodes);
686 }
687 else
688 /* No CFG available?! */
689 gcc_unreachable ();
690
691 for (e = node->callees; e; e = e->next_callee)
692 {
693 if (!e->aux && !e->indirect_call)
694 {
695 error ("edge %s->%s has no corresponding call_stmt",
696 cgraph_node_name (e->caller),
697 cgraph_node_name (e->callee));
698 debug_generic_stmt (e->call_stmt);
699 error_found = true;
700 }
701 e->aux = 0;
702 }
703 }
704 if (error_found)
705 {
706 dump_cgraph_node (stderr, node);
707 internal_error ("verify_cgraph_node failed");
708 }
709 set_cfun (saved_cfun);
710 timevar_pop (TV_CGRAPH_VERIFY);
711 }
712
713 /* Verify whole cgraph structure. */
714 void
715 verify_cgraph (void)
716 {
717 struct cgraph_node *node;
718
719 if (sorrycount || errorcount)
720 return;
721
722 for (node = cgraph_nodes; node; node = node->next)
723 verify_cgraph_node (node);
724 }
725
726 /* Output all asm statements we have stored up to be output. */
727
728 static void
729 cgraph_output_pending_asms (void)
730 {
731 struct cgraph_asm_node *can;
732
733 if (errorcount || sorrycount)
734 return;
735
736 for (can = cgraph_asm_nodes; can; can = can->next)
737 assemble_asm (can->asm_str);
738 cgraph_asm_nodes = NULL;
739 }
740
741 /* Analyze the function scheduled to be output. */
742 void
743 cgraph_analyze_function (struct cgraph_node *node)
744 {
745 tree decl = node->decl;
746
747 current_function_decl = decl;
748 push_cfun (DECL_STRUCT_FUNCTION (decl));
749 cgraph_lower_function (node);
750 node->analyzed = true;
751
752 pop_cfun ();
753 current_function_decl = NULL;
754 }
755
756 /* Look for externally_visible and used attributes and mark cgraph nodes
757 accordingly.
758
759 We cannot mark the nodes at the point the attributes are processed (in
760 handle_*_attribute) because the copy of the declarations available at that
761 point may not be canonical. For example, in:
762
763 void f();
764 void f() __attribute__((used));
765
766 the declaration we see in handle_used_attribute will be the second
767 declaration -- but the front end will subsequently merge that declaration
768 with the original declaration and discard the second declaration.
769
770 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
771
772 void f() {}
773 void f() __attribute__((externally_visible));
774
775 is valid.
776
777 So, we walk the nodes at the end of the translation unit, applying the
778 attributes at that point. */
779
780 static void
781 process_function_and_variable_attributes (struct cgraph_node *first,
782 struct varpool_node *first_var)
783 {
784 struct cgraph_node *node;
785 struct varpool_node *vnode;
786
787 for (node = cgraph_nodes; node != first; node = node->next)
788 {
789 tree decl = node->decl;
790 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
791 {
792 mark_decl_referenced (decl);
793 if (node->local.finalized)
794 cgraph_mark_needed_node (node);
795 }
796 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
797 {
798 if (! TREE_PUBLIC (node->decl))
799 warning (OPT_Wattributes,
800 		   "%J%<externally_visible%> attribute has effect only on public objects",
801 node->decl);
802 else
803 {
804 if (node->local.finalized)
805 cgraph_mark_needed_node (node);
806 node->local.externally_visible = true;
807 }
808 }
809 }
810 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
811 {
812 tree decl = vnode->decl;
813 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
814 {
815 mark_decl_referenced (decl);
816 if (vnode->finalized)
817 varpool_mark_needed_node (vnode);
818 }
819 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
820 {
821 if (! TREE_PUBLIC (vnode->decl))
822 warning (OPT_Wattributes,
823 		   "%J%<externally_visible%> attribute has effect only on public objects",
824 vnode->decl);
825 else
826 {
827 if (vnode->finalized)
828 varpool_mark_needed_node (vnode);
829 vnode->externally_visible = true;
830 }
831 }
832 }
833 }
834
835 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
836    transitively each reachable function) and build the cgraph.
837    The function can be called multiple times after inserting new nodes into
838    the beginning of the queue.  Just the new part of the queue is re-scanned then.  */
839
840 static void
841 cgraph_analyze_functions (void)
842 {
843 /* Keep track of already processed nodes when called multiple times for
844 intermodule optimization. */
845 static struct cgraph_node *first_analyzed;
846 struct cgraph_node *first_processed = first_analyzed;
847 static struct varpool_node *first_analyzed_var;
848 struct cgraph_node *node, *next;
849
850 process_function_and_variable_attributes (first_processed,
851 first_analyzed_var);
852 first_processed = cgraph_nodes;
853 first_analyzed_var = varpool_nodes;
854 varpool_analyze_pending_decls ();
855 if (cgraph_dump_file)
856 {
857 fprintf (cgraph_dump_file, "Initial entry points:");
858 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
859 if (node->needed && DECL_SAVED_TREE (node->decl))
860 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
861 fprintf (cgraph_dump_file, "\n");
862 }
863 cgraph_process_new_functions ();
864
865 /* Propagate reachability flag and lower representation of all reachable
866 functions. In the future, lowering will introduce new functions and
867 new entry points on the way (by template instantiation and virtual
868 method table generation for instance). */
869 while (cgraph_nodes_queue)
870 {
871 struct cgraph_edge *edge;
872 tree decl = cgraph_nodes_queue->decl;
873
874 node = cgraph_nodes_queue;
875 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
876 node->next_needed = NULL;
877
878       /* ??? It is possible to create an extern inline function and later use
879 	 the weak alias attribute to kill its body.  See
880 	 gcc.c-torture/compile/20011119-1.c  */
881 if (!DECL_SAVED_TREE (decl))
882 {
883 cgraph_reset_node (node);
884 continue;
885 }
886
887 gcc_assert (!node->analyzed && node->reachable);
888 gcc_assert (DECL_SAVED_TREE (decl));
889
890 cgraph_analyze_function (node);
891
892 for (edge = node->callees; edge; edge = edge->next_callee)
893 if (!edge->callee->reachable)
894 cgraph_mark_reachable_node (edge->callee);
895
896       /* We finalize local static variables while constructing callgraph
897 	 edges.  Process their attributes too.  */
898 process_function_and_variable_attributes (first_processed,
899 first_analyzed_var);
900 first_processed = cgraph_nodes;
901 first_analyzed_var = varpool_nodes;
902 varpool_analyze_pending_decls ();
903 cgraph_process_new_functions ();
904 }
905
906 /* Collect entry points to the unit. */
907 if (cgraph_dump_file)
908 {
909 fprintf (cgraph_dump_file, "Unit entry points:");
910 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
911 if (node->needed && DECL_SAVED_TREE (node->decl))
912 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
913 fprintf (cgraph_dump_file, "\n\nInitial ");
914 dump_cgraph (cgraph_dump_file);
915 }
916
917 if (cgraph_dump_file)
918 fprintf (cgraph_dump_file, "\nReclaiming functions:");
919
920 for (node = cgraph_nodes; node != first_analyzed; node = next)
921 {
922 tree decl = node->decl;
923 next = node->next;
924
925 if (node->local.finalized && !DECL_SAVED_TREE (decl))
926 cgraph_reset_node (node);
927
928 if (!node->reachable && DECL_SAVED_TREE (decl))
929 {
930 if (cgraph_dump_file)
931 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
932 cgraph_remove_node (node);
933 continue;
934 }
935 else
936 node->next_needed = NULL;
937 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
938 gcc_assert (node->analyzed == node->local.finalized);
939 }
940 if (cgraph_dump_file)
941 {
942 fprintf (cgraph_dump_file, "\n\nReclaimed ");
943 dump_cgraph (cgraph_dump_file);
944 }
945 first_analyzed = cgraph_nodes;
946 ggc_collect ();
947 }
948
949 /* Analyze the whole compilation unit once it is parsed completely. */
950
951 void
952 cgraph_finalize_compilation_unit (void)
953 {
954 if (errorcount || sorrycount)
955 return;
956
957 finish_aliases_1 ();
958
959 if (!quiet_flag)
960 {
961 fprintf (stderr, "\nAnalyzing compilation unit\n");
962 fflush (stderr);
963 }
964
965 timevar_push (TV_CGRAPH);
966 cgraph_analyze_functions ();
967 timevar_pop (TV_CGRAPH);
968 }
969 /* Figure out what functions we want to assemble. */
970
971 static void
972 cgraph_mark_functions_to_output (void)
973 {
974 struct cgraph_node *node;
975
976 for (node = cgraph_nodes; node; node = node->next)
977 {
978 tree decl = node->decl;
979 struct cgraph_edge *e;
980
981 gcc_assert (!node->output);
982
983 for (e = node->callers; e; e = e->next_caller)
984 if (e->inline_failed)
985 break;
986
987 /* We need to output all local functions that are used and not
988 always inlined, as well as those that are reachable from
989 outside the current compilation unit. */
990 if (DECL_SAVED_TREE (decl)
991 && !node->global.inlined_to
992 && (node->needed
993 || (e && node->reachable))
994 && !TREE_ASM_WRITTEN (decl)
995 && !DECL_EXTERNAL (decl))
996 node->output = 1;
997 else
998 {
999 /* We should've reclaimed all functions that are not needed. */
1000 #ifdef ENABLE_CHECKING
1001 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1002 && !DECL_EXTERNAL (decl))
1003 {
1004 dump_cgraph_node (stderr, node);
1005 internal_error ("failed to reclaim unneeded function");
1006 }
1007 #endif
1008 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1009 || DECL_EXTERNAL (decl));
1010
1011 }
1012
1013 }
1014 }
1015
1016 /* Expand function specified by NODE. */
1017
1018 static void
1019 cgraph_expand_function (struct cgraph_node *node)
1020 {
1021 tree decl = node->decl;
1022
1023   /* We ought not to compile any inline clones.  */
1024 gcc_assert (!node->global.inlined_to);
1025
1026 announce_function (decl);
1027
1028 gcc_assert (node->lowered);
1029
1030 /* Generate RTL for the body of DECL. */
1031 if (lang_hooks.callgraph.emit_associated_thunks)
1032 lang_hooks.callgraph.emit_associated_thunks (decl);
1033 tree_rest_of_compilation (decl);
1034
1035   /* Make sure that the back end didn't give up on compiling.  */
1036   /* ??? Can happen with a nested function of an extern inline function.  */
1037 gcc_assert (TREE_ASM_WRITTEN (decl));
1038
1039 current_function_decl = NULL;
1040 if (!cgraph_preserve_function_body_p (decl))
1041 {
1042 cgraph_release_function_body (node);
1043 /* Eliminate all call edges. This is important so the call_expr no longer
1044 points to the dead function body. */
1045 cgraph_node_remove_callees (node);
1046 }
1047
1048 cgraph_function_flags_ready = true;
1049 }
1050
1051 /* Return true when the call on edge E is going to be inlined; otherwise store the failure reason into *REASON.  */
1052
1053 bool
1054 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1055 {
1056 *reason = e->inline_failed;
1057 return !e->inline_failed;
1058 }
1059
1060
1061
1062 /* Expand all functions that must be output.
1063
1064    Attempt to topologically sort the nodes so that a function is output when
1065 all called functions are already assembled to allow data to be
1066 propagated across the callgraph. Use a stack to get smaller distance
1067 between a function and its callees (later we may choose to use a more
1068 sophisticated algorithm for function reordering; we will likely want
1069 to use subsections to make the output functions appear in top-down
1070 order). */
1071
1072 static void
1073 cgraph_expand_all_functions (void)
1074 {
1075 struct cgraph_node *node;
1076 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1077 int order_pos, new_order_pos = 0;
1078 int i;
1079
1080 order_pos = cgraph_postorder (order);
1081 gcc_assert (order_pos == cgraph_n_nodes);
1082
1083   /* The garbage collector may remove inline clones we eliminate during
1084      optimization, so we must be sure not to reference them.  */
1085 for (i = 0; i < order_pos; i++)
1086 if (order[i]->output)
1087 order[new_order_pos++] = order[i];
1088
1089 for (i = new_order_pos - 1; i >= 0; i--)
1090 {
1091 node = order[i];
1092 if (node->output)
1093 {
1094 gcc_assert (node->reachable);
1095 node->output = 0;
1096 cgraph_expand_function (node);
1097 }
1098 }
1099 cgraph_process_new_functions ();
1100
1101 free (order);
1102
1103 }
1104
1105 /* This is used to sort the node types by the cgraph order number. */
1106
1107 struct cgraph_order_sort
1108 {
1109 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1110 union
1111 {
1112 struct cgraph_node *f;
1113 struct varpool_node *v;
1114 struct cgraph_asm_node *a;
1115 } u;
1116 };
1117
1118 /* Output all functions, variables, and asm statements in the order
1119 according to their order fields, which is the order in which they
1120 appeared in the file. This implements -fno-toplevel-reorder. In
1121 this mode we may output functions and variables which don't really
1122 need to be output. */
1123
1124 static void
1125 cgraph_output_in_order (void)
1126 {
1127 int max;
1128 size_t size;
1129 struct cgraph_order_sort *nodes;
1130 int i;
1131 struct cgraph_node *pf;
1132 struct varpool_node *pv;
1133 struct cgraph_asm_node *pa;
1134
1135 max = cgraph_order;
1136 size = max * sizeof (struct cgraph_order_sort);
1137 nodes = (struct cgraph_order_sort *) alloca (size);
1138 memset (nodes, 0, size);
1139
1140 varpool_analyze_pending_decls ();
1141
1142 for (pf = cgraph_nodes; pf; pf = pf->next)
1143 {
1144 if (pf->output)
1145 {
1146 i = pf->order;
1147 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1148 nodes[i].kind = ORDER_FUNCTION;
1149 nodes[i].u.f = pf;
1150 }
1151 }
1152
1153 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1154 {
1155 i = pv->order;
1156 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1157 nodes[i].kind = ORDER_VAR;
1158 nodes[i].u.v = pv;
1159 }
1160
1161 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1162 {
1163 i = pa->order;
1164 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1165 nodes[i].kind = ORDER_ASM;
1166 nodes[i].u.a = pa;
1167 }
1168
1169   /* In -fno-toplevel-reorder mode we output all statics; mark them as needed.  */
1170 for (i = 0; i < max; ++i)
1171 {
1172 if (nodes[i].kind == ORDER_VAR)
1173 {
1174 varpool_mark_needed_node (nodes[i].u.v);
1175 }
1176 }
1177 varpool_empty_needed_queue ();
1178
1179 for (i = 0; i < max; ++i)
1180 {
1181 switch (nodes[i].kind)
1182 {
1183 case ORDER_FUNCTION:
1184 nodes[i].u.f->output = 0;
1185 cgraph_expand_function (nodes[i].u.f);
1186 break;
1187
1188 case ORDER_VAR:
1189 varpool_assemble_decl (nodes[i].u.v);
1190 break;
1191
1192 case ORDER_ASM:
1193 assemble_asm (nodes[i].u.a->asm_str);
1194 break;
1195
1196 case ORDER_UNDEFINED:
1197 break;
1198
1199 default:
1200 gcc_unreachable ();
1201 }
1202 }
1203
1204 cgraph_asm_nodes = NULL;
1205 }
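
/* For example, under -fno-toplevel-reorder a unit that contains, in source
   order, a toplevel asm, a static variable V and a function F is emitted
   in exactly that order, because each of them kept the ->order number it
   was assigned at parse time.  (Illustrative only.)  */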
1206
1207 /* Return true when the function body of DECL still needs to be kept around
1208    for later re-use.  */
1209 bool
1210 cgraph_preserve_function_body_p (tree decl)
1211 {
1212 struct cgraph_node *node;
1213
1214 gcc_assert (cgraph_global_info_ready);
1215 /* Look if there is any clone around. */
1216 for (node = cgraph_node (decl); node; node = node->next_clone)
1217 if (node->global.inlined_to)
1218 return true;
1219 return false;
1220 }
1221
1222 static void
1223 ipa_passes (void)
1224 {
1225 set_cfun (NULL);
1226 current_function_decl = NULL;
1227 tree_register_cfg_hooks ();
1228 bitmap_obstack_initialize (NULL);
1229 execute_ipa_pass_list (all_ipa_passes);
1230 bitmap_obstack_release (NULL);
1231 }
1232
1233 /* Perform simple optimizations based on callgraph. */
1234
1235 void
1236 cgraph_optimize (void)
1237 {
1238 if (errorcount || sorrycount)
1239 return;
1240
1241 #ifdef ENABLE_CHECKING
1242 verify_cgraph ();
1243 #endif
1244
1245 /* Call functions declared with the "constructor" or "destructor"
1246 attribute. */
1247 cgraph_build_cdtor_fns ();
1248
1249   /* The front end may output common variables after the unit has been finalized.
1250      It is safe to deal with them here as they are always zero-initialized.  */
1251 varpool_analyze_pending_decls ();
1252 cgraph_analyze_functions ();
1253
1254 timevar_push (TV_CGRAPHOPT);
1255 if (pre_ipa_mem_report)
1256 {
1257 fprintf (stderr, "Memory consumption before IPA\n");
1258 dump_memory_report (false);
1259 }
1260 if (!quiet_flag)
1261 fprintf (stderr, "Performing interprocedural optimizations\n");
1262 cgraph_state = CGRAPH_STATE_IPA;
1263
1264 /* Don't run the IPA passes if there was any error or sorry messages. */
1265 if (errorcount == 0 && sorrycount == 0)
1266 ipa_passes ();
1267
1268   /* This pass removes bodies of extern inline functions we never inlined.
1269      Do this later so other IPA passes see what is really going on.  */
1270 cgraph_remove_unreachable_nodes (false, dump_file);
1271 cgraph_global_info_ready = true;
1272 if (cgraph_dump_file)
1273 {
1274 fprintf (cgraph_dump_file, "Optimized ");
1275 dump_cgraph (cgraph_dump_file);
1276 dump_varpool (cgraph_dump_file);
1277 }
1278 if (post_ipa_mem_report)
1279 {
1280 fprintf (stderr, "Memory consumption after IPA\n");
1281 dump_memory_report (false);
1282 }
1283 timevar_pop (TV_CGRAPHOPT);
1284
1285 /* Output everything. */
1286 if (!quiet_flag)
1287 fprintf (stderr, "Assembling functions:\n");
1288 #ifdef ENABLE_CHECKING
1289 verify_cgraph ();
1290 #endif
1291
1292 cgraph_mark_functions_to_output ();
1293
1294 cgraph_state = CGRAPH_STATE_EXPANSION;
1295 if (!flag_toplevel_reorder)
1296 cgraph_output_in_order ();
1297 else
1298 {
1299 cgraph_output_pending_asms ();
1300
1301 cgraph_expand_all_functions ();
1302 varpool_remove_unreferenced_decls ();
1303
1304 varpool_assemble_pending_decls ();
1305 }
1306 varpool_output_debug_info ();
1307 cgraph_process_new_functions ();
1308 cgraph_state = CGRAPH_STATE_FINISHED;
1309
1310 if (cgraph_dump_file)
1311 {
1312 fprintf (cgraph_dump_file, "\nFinal ");
1313 dump_cgraph (cgraph_dump_file);
1314 }
1315 #ifdef ENABLE_CHECKING
1316 verify_cgraph ();
1317 /* Double check that all inline clones are gone and that all
1318 function bodies have been released from memory. */
1319 if (!(sorrycount || errorcount))
1320 {
1321 struct cgraph_node *node;
1322 bool error_found = false;
1323
1324 for (node = cgraph_nodes; node; node = node->next)
1325 if (node->analyzed
1326 && (node->global.inlined_to
1327 || DECL_SAVED_TREE (node->decl)))
1328 {
1329 error_found = true;
1330 dump_cgraph_node (stderr, node);
1331 }
1332 if (error_found)
1333 internal_error ("nodes with unreleased memory found");
1334 }
1335 #endif
1336 }
1337 /* Generate and emit a static constructor or destructor. WHICH must
1338 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1339 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1340 initialization priority for this constructor or destructor. */
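
/* For example (following the sprintf format used below; the exact assembler
   name is up to get_file_function_name): a constructor with the default
   priority 65535 emitted while COUNTER is still 0 passes the string
   "I_65535_0" to get_file_function_name, so collect2 can recover both the
   kind and the priority from the resulting symbol name.  */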
1341
1342 void
1343 cgraph_build_static_cdtor (char which, tree body, int priority)
1344 {
1345 static int counter = 0;
1346 char which_buf[16];
1347 tree decl, name, resdecl;
1348
1349 /* The priority is encoded in the constructor or destructor name.
1350 collect2 will sort the names and arrange that they are called at
1351 program startup. */
1352 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1353 name = get_file_function_name (which_buf);
1354
1355 decl = build_decl (FUNCTION_DECL, name,
1356 build_function_type (void_type_node, void_list_node));
1357 current_function_decl = decl;
1358
1359 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1360 DECL_ARTIFICIAL (resdecl) = 1;
1361 DECL_RESULT (decl) = resdecl;
1362
1363 allocate_struct_function (decl, false);
1364
1365 TREE_STATIC (decl) = 1;
1366 TREE_USED (decl) = 1;
1367 DECL_ARTIFICIAL (decl) = 1;
1368 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1369 DECL_SAVED_TREE (decl) = body;
1370 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1371 DECL_UNINLINABLE (decl) = 1;
1372
1373 DECL_INITIAL (decl) = make_node (BLOCK);
1374 TREE_USED (DECL_INITIAL (decl)) = 1;
1375
1376 DECL_SOURCE_LOCATION (decl) = input_location;
1377 cfun->function_end_locus = input_location;
1378
1379 switch (which)
1380 {
1381 case 'I':
1382 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1383 decl_init_priority_insert (decl, priority);
1384 break;
1385 case 'D':
1386 DECL_STATIC_DESTRUCTOR (decl) = 1;
1387 decl_fini_priority_insert (decl, priority);
1388 break;
1389 default:
1390 gcc_unreachable ();
1391 }
1392
1393 gimplify_function_tree (decl);
1394
1395 cgraph_add_new_function (decl, false);
1396 cgraph_mark_needed_node (cgraph_node (decl));
1397 set_cfun (NULL);
1398 }
1399
1400 void
1401 init_cgraph (void)
1402 {
1403 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1404 }
1405
1406 /* The edges representing the callers of the NEW_VERSION node were
1407    fixed by cgraph_function_versioning (); now the call_expr in each
1408    caller's call statement should be updated to call the NEW_VERSION.  */
1409
1410 static void
1411 update_call_expr (struct cgraph_node *new_version)
1412 {
1413 struct cgraph_edge *e;
1414
1415 gcc_assert (new_version);
1416 for (e = new_version->callers; e; e = e->next_caller)
1417 /* Update the call expr on the edges
1418 to call the new version. */
1419 TREE_OPERAND (CALL_EXPR_FN (get_call_expr_in (e->call_stmt)), 0) = new_version->decl;
1420 }
1421
1422
1423 /* Create a new cgraph node which is the new version of the
1424    OLD_VERSION node.  REDIRECT_CALLERS holds the caller
1425    edges which should be redirected to point to
1426    NEW_VERSION.  All the callee edges of OLD_VERSION
1427    are cloned to the new version node.  Return the new
1428    version node.  */
1429
1430 static struct cgraph_node *
1431 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1432 tree new_decl,
1433 VEC(cgraph_edge_p,heap) *redirect_callers)
1434 {
1435 struct cgraph_node *new_version;
1436 struct cgraph_edge *e, *new_e;
1437 struct cgraph_edge *next_callee;
1438 unsigned i;
1439
1440 gcc_assert (old_version);
1441
1442 new_version = cgraph_node (new_decl);
1443
1444 new_version->analyzed = true;
1445 new_version->local = old_version->local;
1446 new_version->global = old_version->global;
1447   new_version->rtl = old_version->rtl;
1448 new_version->reachable = true;
1449 new_version->count = old_version->count;
1450
1451 /* Clone the old node callees. Recursive calls are
1452 also cloned. */
1453 for (e = old_version->callees;e; e=e->next_callee)
1454 {
1455 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1456 e->loop_nest, true);
1457 new_e->count = e->count;
1458 }
1459   /* Fix recursive calls.
1460      If OLD_VERSION has a recursive call after the
1461      previous edge cloning, the new version will have an edge
1462      pointing to the old version, which is wrong;
1463      redirect it to point to the new version.  */
1464 for (e = new_version->callees ; e; e = next_callee)
1465 {
1466 next_callee = e->next_callee;
1467 if (e->callee == old_version)
1468 cgraph_redirect_edge_callee (e, new_version);
1469
1470 if (!next_callee)
1471 break;
1472 }
1473 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1474 {
1475 /* Redirect calls to the old version node to point to its new
1476 version. */
1477 cgraph_redirect_edge_callee (e, new_version);
1478 }
1479
1480 return new_version;
1481 }
1482
1483 /* Perform function versioning.
1484 Function versioning includes copying of the tree and
1485 a callgraph update (creating a new cgraph node and updating
1486 its callees and callers).
1487
1488    The REDIRECT_CALLERS vector includes the edges to be redirected
1489 to the new version.
1490
1491 TREE_MAP is a mapping of tree nodes we want to replace with
1492 new ones (according to results of prior analysis).
1493 OLD_VERSION_NODE is the node that is versioned.
1494 It returns the new version's cgraph node. */
1495
1496 struct cgraph_node *
1497 cgraph_function_versioning (struct cgraph_node *old_version_node,
1498 VEC(cgraph_edge_p,heap) *redirect_callers,
1499 varray_type tree_map)
1500 {
1501 tree old_decl = old_version_node->decl;
1502 struct cgraph_node *new_version_node = NULL;
1503 tree new_decl;
1504
1505 if (!tree_versionable_function_p (old_decl))
1506 return NULL;
1507
1508 /* Make a new FUNCTION_DECL tree node for the
1509 new version. */
1510 new_decl = copy_node (old_decl);
1511
1512 /* Create the new version's call-graph node.
1513 and update the edges of the new node. */
1514 new_version_node =
1515 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1516 redirect_callers);
1517
1518 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1519 tree_function_versioning (old_decl, new_decl, tree_map, false);
1520 /* Update the call_expr on the edges to call the new version node. */
1521 update_call_expr (new_version_node);
1522
1523   /* Update the new version's properties.
1524      Make the new version visible only within this translation unit.
1525      ??? We cannot use COMDAT linkage because there is no
1526      ABI support for this.  */
1527 DECL_EXTERNAL (new_version_node->decl) = 0;
1528 DECL_ONE_ONLY (new_version_node->decl) = 0;
1529 TREE_PUBLIC (new_version_node->decl) = 0;
1530 DECL_COMDAT (new_version_node->decl) = 0;
1531 new_version_node->local.externally_visible = 0;
1532 new_version_node->local.local = 1;
1533 new_version_node->lowered = true;
1534 return new_version_node;
1535 }
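
/* A minimal usage sketch (hypothetical caller; an IPA pass that decided to
   specialize NODE for all of its current callers might do something like):

     VEC (cgraph_edge_p, heap) *callers = NULL;
     struct cgraph_edge *e;
     struct cgraph_node *clone;

     for (e = node->callers; e; e = e->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, callers, e);
     clone = cgraph_function_versioning (node, callers, NULL);
     VEC_free (cgraph_edge_p, heap, callers);

   The clone is made local to this unit; TREE_MAP is passed as NULL here,
   i.e. no tree replacements are requested.  */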
1536
1537 /* Produce a separate function body for inline clones so the offline copy can
1538    be modified without affecting them.  */
1539 struct cgraph_node *
1540 save_inline_function_body (struct cgraph_node *node)
1541 {
1542 struct cgraph_node *first_clone;
1543
1544 gcc_assert (node == cgraph_node (node->decl));
1545
1546 cgraph_lower_function (node);
1547
1548 first_clone = node->next_clone;
1549
1550 first_clone->decl = copy_node (node->decl);
1551 node->next_clone = NULL;
1552 first_clone->prev_clone = NULL;
1553 cgraph_insert_node_to_hashtable (first_clone);
1554 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1555
1556 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1557 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1558
1559 DECL_EXTERNAL (first_clone->decl) = 0;
1560 DECL_ONE_ONLY (first_clone->decl) = 0;
1561 TREE_PUBLIC (first_clone->decl) = 0;
1562 DECL_COMDAT (first_clone->decl) = 0;
1563
1564 for (node = first_clone->next_clone; node; node = node->next_clone)
1565 node->decl = first_clone->decl;
1566 #ifdef ENABLE_CHECKING
1567 verify_cgraph_node (first_clone);
1568 #endif
1569 return first_clone;
1570 }
1571
1572 #include "gt-cgraphunit.h"