backport: ChangeLog.tuples: ChangeLog from gimple-tuples-branch.
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33      and it is certain that neither the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48      Here the call-graph construction and local function
49      analysis take place.  Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75      callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94      assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97      that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really has been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "c-common.h"
130 #include "intl.h"
131 #include "function.h"
132 #include "ipa-prop.h"
133 #include "gimple.h"
134 #include "tree-iterator.h"
135 #include "tree-pass.h"
136 #include "output.h"
137
138 static void cgraph_expand_all_functions (void);
139 static void cgraph_mark_functions_to_output (void);
140 static void cgraph_expand_function (struct cgraph_node *);
141 static void cgraph_output_pending_asms (void);
142
143 static FILE *cgraph_dump_file;
144
145 /* A vector of FUNCTION_DECLs declared as static constructors. */
146 static GTY (()) VEC(tree, gc) *static_ctors;
147 /* A vector of FUNCTION_DECLs declared as static destructors. */
148 static GTY (()) VEC(tree, gc) *static_dtors;
149
/* When target does not have ctors and dtors, we call all constructor
   and destructor by special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (tree fndecl)
{
  struct cgraph_node *node;

  /* Nothing to do when the target emits .ctors/.dtors sections itself,
     or when FNDECL is neither a static constructor nor a destructor.  */
  if (targetm.have_ctors_dtors
      || (!DECL_STATIC_CONSTRUCTOR (fndecl)
	  && !DECL_STATIC_DESTRUCTOR (fndecl)))
    return;

  if (DECL_STATIC_CONSTRUCTOR (fndecl))
    {
      /* Remember FNDECL and clear the flag so it is emitted as an
	 ordinary function; the collect2-recognized wrapper built later
	 by cgraph_build_cdtor_fns will call it instead.  */
      VEC_safe_push (tree, gc, static_ctors, fndecl);
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
    }
  if (DECL_STATIC_DESTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_dtors, fndecl);
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
    }
  /* The wrapper must be able to call FNDECL: never apply inline limits
     to it and keep its node reachable.  */
  node = cgraph_node (fndecl);
  node->local.disregard_inline_limits = 1;
  cgraph_mark_reachable_node (node);
}
180
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  On the first iteration BODY is still
	 NULL_TREE, so the batch priority is taken from the first
	 function; subsequent iterations stop at the first function
	 whose priority differs.  */
      do
	{
	  priority_type p;
	  fn = cdtors[i];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (!body)
	    priority = p;
	  else if (p != priority)
	    break;
	  /* Accumulate a call to FN into the statement list BODY.  */
	  append_to_statement_list (build_function_call_expr (fn, 0),
				    &body);
	  ++i;
	}
      while (i < len);
      /* Each outer iteration consumes at least one function.  */
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
222
223 /* Comparison function for qsort. P1 and P2 are actually of type
224 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
225 used to determine the sort order. */
226
227 static int
228 compare_ctor (const void *p1, const void *p2)
229 {
230 tree f1;
231 tree f2;
232 int priority1;
233 int priority2;
234
235 f1 = *(const tree *)p1;
236 f2 = *(const tree *)p2;
237 priority1 = DECL_INIT_PRIORITY (f1);
238 priority2 = DECL_INIT_PRIORITY (f2);
239
240 if (priority1 < priority2)
241 return -1;
242 else if (priority1 > priority2)
243 return 1;
244 else
245 /* Ensure a stable sort. */
246 return (const tree *)p1 - (const tree *)p2;
247 }
248
249 /* Comparison function for qsort. P1 and P2 are actually of type
250 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
251 used to determine the sort order. */
252
253 static int
254 compare_dtor (const void *p1, const void *p2)
255 {
256 tree f1;
257 tree f2;
258 int priority1;
259 int priority2;
260
261 f1 = *(const tree *)p1;
262 f2 = *(const tree *)p2;
263 priority1 = DECL_FINI_PRIORITY (f1);
264 priority2 = DECL_FINI_PRIORITY (f2);
265
266 if (priority1 < priority2)
267 return -1;
268 else if (priority1 > priority2)
269 return 1;
270 else
271 /* Ensure a stable sort. */
272 return (const tree *)p1 - (const tree *)p2;
273 }
274
/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
cgraph_build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      /* static_ctors is only populated when the target lacks native
	 ctor/dtor support (see record_cdtor_fn).  */
      gcc_assert (!targetm.have_ctors_dtors);
      /* Sort by DECL_INIT_PRIORITY so build_cdtor can batch functions
	 of equal priority into one wrapper.  */
      qsort (VEC_address (tree, static_ctors),
	     VEC_length (tree, static_ctors),
	     sizeof (tree),
	     compare_ctor);
      build_cdtor (/*ctor_p=*/true,
		   VEC_address (tree, static_ctors),
		   VEC_length (tree, static_ctors));
      /* The vector has served its purpose; empty it.  */
      VEC_truncate (tree, static_ctors, 0);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_dtors),
	     VEC_length (tree, static_dtors),
	     sizeof (tree),
	     compare_dtor);
      build_cdtor (/*ctor_p=*/false,
		   VEC_address (tree, static_dtors),
		   VEC_length (tree, static_dtors));
      VEC_truncate (tree, static_dtors, 0);
    }
}
308
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, or something magic in the system
   configury.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* A public "main" is the program entry point: always needed and,
     by definition, externally visible.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !node->origin))
      && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}
371
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Returns true when at least one function was finalized while in the
   construction phase (i.e. new output may be pending).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      /* Pop NODE off the queue before processing it; the work below may
	 push further entries onto CGRAPH_NEW_NODES.  */
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  /* The passes above may have left dominance info behind;
	     discard it rather than keep it stale.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->output = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  return output;
}
440
/* As an GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->output is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->output);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  /* Set after the memset above so the flag survives the wipe.  */
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  /* The new body will build its own call edges during analysis.  */
  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already proceeded it and ignored as not needed or got
     a re-declaration in IMA mode.  */
  if (node->reachable)
    {
      struct cgraph_node *n;

      /* Keep the reachable flag only while the node is still on the
	 CGRAPH_NODES_QUEUE worklist.  */
      for (n = cgraph_nodes_queue; n; n = n->next_needed)
	if (n == node)
	  break;
      if (!n)
	node->reachable = 0;
    }
}
485
486 static void
487 cgraph_lower_function (struct cgraph_node *node)
488 {
489 if (node->lowered)
490 return;
491 tree_lowering_passes (node->decl);
492 node->lowered = true;
493 }
494
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A second finalization means DECL was redefined; start over.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A body that already has a CFG has been lowered by the front end.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  record_cdtor_fn (node->decl);
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Unless the caller forbade it via NESTED, this is a convenient
     moment to collect garbage.  */
  if (!nested)
    ggc_collect ();
}
537
538 /* C99 extern inline keywords allow changing of declaration after function
539 has been finalized. We need to re-decide if we want to mark the function as
540 needed then. */
541
542 void
543 cgraph_mark_if_needed (tree decl)
544 {
545 struct cgraph_node *node = cgraph_node (decl);
546 if (node->local.finalized && decide_is_function_needed (node, decl))
547 cgraph_mark_needed_node (node);
548 }
549
550 /* Verify cgraph nodes of given cgraph node. */
551 void
552 verify_cgraph_node (struct cgraph_node *node)
553 {
554 struct cgraph_edge *e;
555 struct cgraph_node *main_clone;
556 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
557 struct function *saved_cfun = cfun;
558 basic_block this_block;
559 gimple_stmt_iterator gsi;
560 bool error_found = false;
561
562 if (errorcount || sorrycount)
563 return;
564
565 timevar_push (TV_CGRAPH_VERIFY);
566 /* debug_generic_stmt needs correct cfun */
567 set_cfun (this_cfun);
568 for (e = node->callees; e; e = e->next_callee)
569 if (e->aux)
570 {
571 error ("aux field set for edge %s->%s",
572 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
573 error_found = true;
574 }
575 if (node->count < 0)
576 {
577 error ("Execution count is negative");
578 error_found = true;
579 }
580 for (e = node->callers; e; e = e->next_caller)
581 {
582 if (e->count < 0)
583 {
584 error ("caller edge count is negative");
585 error_found = true;
586 }
587 if (e->frequency < 0)
588 {
589 error ("caller edge frequency is negative");
590 error_found = true;
591 }
592 if (e->frequency > CGRAPH_FREQ_MAX)
593 {
594 error ("caller edge frequency is too large");
595 error_found = true;
596 }
597 if (!e->inline_failed)
598 {
599 if (node->global.inlined_to
600 != (e->caller->global.inlined_to
601 ? e->caller->global.inlined_to : e->caller))
602 {
603 error ("inlined_to pointer is wrong");
604 error_found = true;
605 }
606 if (node->callers->next_caller)
607 {
608 error ("multiple inline callers");
609 error_found = true;
610 }
611 }
612 else
613 if (node->global.inlined_to)
614 {
615 error ("inlined_to pointer set for noninline callers");
616 error_found = true;
617 }
618 }
619 if (!node->callers && node->global.inlined_to)
620 {
621 error ("inlined_to pointer is set but no predecessors found");
622 error_found = true;
623 }
624 if (node->global.inlined_to == node)
625 {
626 error ("inlined_to pointer refers to itself");
627 error_found = true;
628 }
629
630 for (main_clone = cgraph_node (node->decl); main_clone;
631 main_clone = main_clone->next_clone)
632 if (main_clone == node)
633 break;
634 if (!cgraph_node (node->decl))
635 {
636 error ("node not found in cgraph_hash");
637 error_found = true;
638 }
639
640 if (node->analyzed
641 && gimple_body (node->decl)
642 && !TREE_ASM_WRITTEN (node->decl)
643 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
644 {
645 if (this_cfun->cfg)
646 {
647 /* The nodes we're interested in are never shared, so walk
648 the tree ignoring duplicates. */
649 struct pointer_set_t *visited_nodes = pointer_set_create ();
650 /* Reach the trees by walking over the CFG, and note the
651 enclosing basic-blocks in the call edges. */
652 FOR_EACH_BB_FN (this_block, this_cfun)
653 for (gsi = gsi_start_bb (this_block);
654 !gsi_end_p (gsi);
655 gsi_next (&gsi))
656 {
657 gimple stmt = gsi_stmt (gsi);
658 tree decl;
659 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
660 {
661 struct cgraph_edge *e = cgraph_edge (node, stmt);
662 if (e)
663 {
664 if (e->aux)
665 {
666 error ("shared call_stmt:");
667 debug_gimple_stmt (stmt);
668 error_found = true;
669 }
670 if (e->callee->decl != cgraph_node (decl)->decl
671 && e->inline_failed)
672 {
673 error ("edge points to wrong declaration:");
674 debug_tree (e->callee->decl);
675 fprintf (stderr," Instead of:");
676 debug_tree (decl);
677 }
678 e->aux = (void *)1;
679 }
680 else
681 {
682 error ("missing callgraph edge for call stmt:");
683 debug_gimple_stmt (stmt);
684 error_found = true;
685 }
686 }
687 }
688 pointer_set_destroy (visited_nodes);
689 }
690 else
691 /* No CFG available?! */
692 gcc_unreachable ();
693
694 for (e = node->callees; e; e = e->next_callee)
695 {
696 if (!e->aux && !e->indirect_call)
697 {
698 error ("edge %s->%s has no corresponding call_stmt",
699 cgraph_node_name (e->caller),
700 cgraph_node_name (e->callee));
701 debug_gimple_stmt (e->call_stmt);
702 error_found = true;
703 }
704 e->aux = 0;
705 }
706 }
707 if (error_found)
708 {
709 dump_cgraph_node (stderr, node);
710 internal_error ("verify_cgraph_node failed");
711 }
712 set_cfun (saved_cfun);
713 timevar_pop (TV_CGRAPH_VERIFY);
714 }
715
716 /* Verify whole cgraph structure. */
717 void
718 verify_cgraph (void)
719 {
720 struct cgraph_node *node;
721
722 if (sorrycount || errorcount)
723 return;
724
725 for (node = cgraph_nodes; node; node = node->next)
726 verify_cgraph_node (node);
727 }
728
729 /* Output all asm statements we have stored up to be output. */
730
731 static void
732 cgraph_output_pending_asms (void)
733 {
734 struct cgraph_asm_node *can;
735
736 if (errorcount || sorrycount)
737 return;
738
739 for (can = cgraph_asm_nodes; can; can = can->next)
740 assemble_asm (can->asm_str);
741 cgraph_asm_nodes = NULL;
742 }
743
/* Analyze the function scheduled to be output.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* Lowering expects CURRENT_FUNCTION_DECL and CFUN to be set up;
     both are restored before returning.  */
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = NULL;
}
758
759 /* Look for externally_visible and used attributes and mark cgraph nodes
760 accordingly.
761
762 We cannot mark the nodes at the point the attributes are processed (in
763 handle_*_attribute) because the copy of the declarations available at that
764 point may not be canonical. For example, in:
765
766 void f();
767 void f() __attribute__((used));
768
769 the declaration we see in handle_used_attribute will be the second
770 declaration -- but the front end will subsequently merge that declaration
771 with the original declaration and discard the second declaration.
772
773 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
774
775 void f() {}
776 void f() __attribute__((externally_visible));
777
778 is valid.
779
780 So, we walk the nodes at the end of the translation unit, applying the
781 attributes at that point. */
782
783 static void
784 process_function_and_variable_attributes (struct cgraph_node *first,
785 struct varpool_node *first_var)
786 {
787 struct cgraph_node *node;
788 struct varpool_node *vnode;
789
790 for (node = cgraph_nodes; node != first; node = node->next)
791 {
792 tree decl = node->decl;
793 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
794 {
795 mark_decl_referenced (decl);
796 if (node->local.finalized)
797 cgraph_mark_needed_node (node);
798 }
799 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
800 {
801 if (! TREE_PUBLIC (node->decl))
802 warning (OPT_Wattributes,
803 "%J%<externally_visible%> attribute have effect only on public objects",
804 node->decl);
805 else
806 {
807 if (node->local.finalized)
808 cgraph_mark_needed_node (node);
809 node->local.externally_visible = true;
810 }
811 }
812 }
813 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
814 {
815 tree decl = vnode->decl;
816 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
817 {
818 mark_decl_referenced (decl);
819 if (vnode->finalized)
820 varpool_mark_needed_node (vnode);
821 }
822 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
823 {
824 if (! TREE_PUBLIC (vnode->decl))
825 warning (OPT_Wattributes,
826 "%J%<externally_visible%> attribute have effect only on public objects",
827 vnode->decl);
828 else
829 {
830 if (vnode->finalized)
831 varpool_mark_needed_node (vnode);
832 vnode->externally_visible = true;
833 }
834 }
835 }
836 }
837
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && gimple_body (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the worklist.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (gimple_body (decl));

      cgraph_analyze_function (node);

      /* Analysis built this node's call edges; everything it calls is
	 itself reachable.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && gimple_body (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release the nodes (and bodies) of everything that was never marked
     reachable during the propagation above.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_body (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_body (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_body (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
951
/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  /* Do not waste time analyzing a unit that failed to parse.  */
  if (errorcount || sorrycount)
    return;

  /* NOTE(review): finish_aliases_1 is defined elsewhere; presumably it
     resolves pending alias pairs before analysis — confirm in varasm.c.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_analyze_functions ();
  timevar_pop (TV_CGRAPH);
}
/* Figure out what functions we want to assemble.  Sets node->output on
   every node whose body must be emitted out of line.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->output);

      /* Look for at least one caller into which this function was NOT
	 inlined; such a caller needs an out-of-line copy.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (gimple_body (decl)
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->output = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_body (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_body (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
1020
/* Expand function specified by NODE.  Generates assembly for the body
   and, unless the body must be preserved for later inlining, releases
   it together with the node's call edges.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);

  /* Lowering must have happened during analysis.  */
  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  if (lang_hooks.callgraph.emit_associated_thunks)
    lang_hooks.callgraph.emit_associated_thunks (decl);
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  /* ??? Can happen with nested function of extern inline.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (!cgraph_preserve_function_body_p (decl))
    {
      cgraph_release_function_body (node);
      /* Eliminate all call edges.  This is important so the call_expr no longer
	 points to the dead function body.  */
      cgraph_node_remove_callees (node);
    }

  cgraph_function_flags_ready = true;
}
1054
1055 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1056
1057 bool
1058 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1059 {
1060 *reason = e->inline_failed;
1061 return !e->inline_failed;
1062 }
1063
1064
1065
1066 /* Expand all functions that must be output.
1067
1068 Attempt to topologically sort the nodes so function is output when
1069 all called functions are already assembled to allow data to be
1070 propagated across the callgraph. Use a stack to get smaller distance
1071 between a function and its callees (later we may choose to use a more
1072 sophisticated algorithm for function reordering; we will likely want
1073 to use subsections to make the output functions appear in top-down
1074 order). */
1075
1076 static void
1077 cgraph_expand_all_functions (void)
1078 {
1079 struct cgraph_node *node;
1080 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1081 int order_pos, new_order_pos = 0;
1082 int i;
1083
1084 order_pos = cgraph_postorder (order);
1085 gcc_assert (order_pos == cgraph_n_nodes);
1086
1087 /* Garbage collector may remove inline clones we eliminate during
1088 optimization. So we must be sure to not reference them. */
1089 for (i = 0; i < order_pos; i++)
1090 if (order[i]->output)
1091 order[new_order_pos++] = order[i];
1092
1093 for (i = new_order_pos - 1; i >= 0; i--)
1094 {
1095 node = order[i];
1096 if (node->output)
1097 {
1098 gcc_assert (node->reachable);
1099 node->output = 0;
1100 cgraph_expand_function (node);
1101 }
1102 }
1103 cgraph_process_new_functions ();
1104
1105 free (order);
1106
1107 }
1108
/* This is used to sort the node types by the cgraph order number.
   One slot exists per toplevel entity; KIND discriminates which union
   member is valid, with ORDER_UNDEFINED marking an unused slot.  */

struct cgraph_order_sort
{
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  union
  {
    struct cgraph_node *f;	/* Valid when KIND == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when KIND == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when KIND == ORDER_ASM.  */
  } u;
};
1121
1122 /* Output all functions, variables, and asm statements in the order
1123 according to their order fields, which is the order in which they
1124 appeared in the file. This implements -fno-toplevel-reorder. In
1125 this mode we may output functions and variables which don't really
1126 need to be output. */
1127
1128 static void
1129 cgraph_output_in_order (void)
1130 {
1131 int max;
1132 size_t size;
1133 struct cgraph_order_sort *nodes;
1134 int i;
1135 struct cgraph_node *pf;
1136 struct varpool_node *pv;
1137 struct cgraph_asm_node *pa;
1138
1139 max = cgraph_order;
1140 size = max * sizeof (struct cgraph_order_sort);
1141 nodes = (struct cgraph_order_sort *) alloca (size);
1142 memset (nodes, 0, size);
1143
1144 varpool_analyze_pending_decls ();
1145
1146 for (pf = cgraph_nodes; pf; pf = pf->next)
1147 {
1148 if (pf->output)
1149 {
1150 i = pf->order;
1151 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1152 nodes[i].kind = ORDER_FUNCTION;
1153 nodes[i].u.f = pf;
1154 }
1155 }
1156
1157 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1158 {
1159 i = pv->order;
1160 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1161 nodes[i].kind = ORDER_VAR;
1162 nodes[i].u.v = pv;
1163 }
1164
1165 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1166 {
1167 i = pa->order;
1168 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1169 nodes[i].kind = ORDER_ASM;
1170 nodes[i].u.a = pa;
1171 }
1172
1173 /* In toplevel reorder mode we output all statics; mark them as needed. */
1174 for (i = 0; i < max; ++i)
1175 {
1176 if (nodes[i].kind == ORDER_VAR)
1177 {
1178 varpool_mark_needed_node (nodes[i].u.v);
1179 }
1180 }
1181 varpool_empty_needed_queue ();
1182
1183 for (i = 0; i < max; ++i)
1184 {
1185 switch (nodes[i].kind)
1186 {
1187 case ORDER_FUNCTION:
1188 nodes[i].u.f->output = 0;
1189 cgraph_expand_function (nodes[i].u.f);
1190 break;
1191
1192 case ORDER_VAR:
1193 varpool_assemble_decl (nodes[i].u.v);
1194 break;
1195
1196 case ORDER_ASM:
1197 assemble_asm (nodes[i].u.a->asm_str);
1198 break;
1199
1200 case ORDER_UNDEFINED:
1201 break;
1202
1203 default:
1204 gcc_unreachable ();
1205 }
1206 }
1207
1208 cgraph_asm_nodes = NULL;
1209 }
1210
1211 /* Return true when function body of DECL still needs to be kept around
1212 for later re-use. */
1213 bool
1214 cgraph_preserve_function_body_p (tree decl)
1215 {
1216 struct cgraph_node *node;
1217
1218 gcc_assert (cgraph_global_info_ready);
1219 /* Look if there is any clone around. */
1220 for (node = cgraph_node (decl); node; node = node->next_clone)
1221 if (node->global.inlined_to)
1222 return true;
1223 return false;
1224 }
1225
/* Run the list of small interprocedural (IPA) passes.  IPA passes
   operate on the whole callgraph rather than on a single function, so
   clear the per-function state before executing them.  */

static void
ipa_passes (void)
{
  /* No function is "current" while IPA passes run.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  /* Give the passes a bitmap obstack for the duration of the list.  */
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1236
/* Perform simple optimizations based on callgraph.  This is the main
   compilation driver after the unit is finalized: it runs the IPA
   passes, prunes unreachable nodes, and then expands and assembles
   everything that must be output.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();
  cgraph_analyze_functions ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder requires source order; otherwise expand in
     (roughly) topological order for better IPA data propagation.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  varpool_output_debug_info ();
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_body (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
1341 /* Generate and emit a static constructor or destructor. WHICH must
1342 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1343 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1344 initialization priority for this constructor or destructor. */
1345
1346 void
1347 cgraph_build_static_cdtor (char which, tree body, int priority)
1348 {
1349 static int counter = 0;
1350 char which_buf[16];
1351 tree decl, name, resdecl;
1352
1353 /* The priority is encoded in the constructor or destructor name.
1354 collect2 will sort the names and arrange that they are called at
1355 program startup. */
1356 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1357 name = get_file_function_name (which_buf);
1358
1359 decl = build_decl (FUNCTION_DECL, name,
1360 build_function_type (void_type_node, void_list_node));
1361 current_function_decl = decl;
1362
1363 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1364 DECL_ARTIFICIAL (resdecl) = 1;
1365 DECL_RESULT (decl) = resdecl;
1366
1367 allocate_struct_function (decl, false);
1368
1369 TREE_STATIC (decl) = 1;
1370 TREE_USED (decl) = 1;
1371 DECL_ARTIFICIAL (decl) = 1;
1372 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1373 DECL_SAVED_TREE (decl) = body;
1374 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1375 DECL_UNINLINABLE (decl) = 1;
1376
1377 DECL_INITIAL (decl) = make_node (BLOCK);
1378 TREE_USED (DECL_INITIAL (decl)) = 1;
1379
1380 DECL_SOURCE_LOCATION (decl) = input_location;
1381 cfun->function_end_locus = input_location;
1382
1383 switch (which)
1384 {
1385 case 'I':
1386 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1387 decl_init_priority_insert (decl, priority);
1388 break;
1389 case 'D':
1390 DECL_STATIC_DESTRUCTOR (decl) = 1;
1391 decl_fini_priority_insert (decl, priority);
1392 break;
1393 default:
1394 gcc_unreachable ();
1395 }
1396
1397 gimplify_function_tree (decl);
1398
1399 cgraph_add_new_function (decl, false);
1400 cgraph_mark_needed_node (cgraph_node (decl));
1401 set_cfun (NULL);
1402 }
1403
/* Initialize the callgraph machinery: open the cgraph dump stream
   (NULL when the TDI_cgraph dump was not requested).  */
void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1409
1410 /* The edges representing the callers of the NEW_VERSION node were
1411 fixed by cgraph_function_versioning (), now the call_expr in their
1412 respective tree code should be updated to call the NEW_VERSION. */
1413
1414 static void
1415 update_call_expr (struct cgraph_node *new_version)
1416 {
1417 struct cgraph_edge *e;
1418
1419 gcc_assert (new_version);
1420
1421 /* Update the call expr on the edges to call the new version. */
1422 for (e = new_version->callers; e; e = e->next_caller)
1423 gimple_call_set_fn (e->call_stmt, new_version->decl);
1424 }
1425
1426
1427 /* Create a new cgraph node which is the new version of
1428 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1429 edges which should be redirected to point to
1430 NEW_VERSION. ALL the callees edges of OLD_VERSION
1431 are cloned to the new version node. Return the new
1432 version node. */
1433
1434 static struct cgraph_node *
1435 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1436 tree new_decl,
1437 VEC(cgraph_edge_p,heap) *redirect_callers)
1438 {
1439 struct cgraph_node *new_version;
1440 struct cgraph_edge *e, *new_e;
1441 struct cgraph_edge *next_callee;
1442 unsigned i;
1443
1444 gcc_assert (old_version);
1445
1446 new_version = cgraph_node (new_decl);
1447
1448 new_version->analyzed = true;
1449 new_version->local = old_version->local;
1450 new_version->global = old_version->global;
1451 new_version->rtl = new_version->rtl;
1452 new_version->reachable = true;
1453 new_version->count = old_version->count;
1454
1455 /* Clone the old node callees. Recursive calls are
1456 also cloned. */
1457 for (e = old_version->callees;e; e=e->next_callee)
1458 {
1459 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1460 e->loop_nest, true);
1461 new_e->count = e->count;
1462 }
1463 /* Fix recursive calls.
1464 If OLD_VERSION has a recursive call after the
1465 previous edge cloning, the new version will have an edge
1466 pointing to the old version, which is wrong;
1467 Redirect it to point to the new version. */
1468 for (e = new_version->callees ; e; e = next_callee)
1469 {
1470 next_callee = e->next_callee;
1471 if (e->callee == old_version)
1472 cgraph_redirect_edge_callee (e, new_version);
1473
1474 if (!next_callee)
1475 break;
1476 }
1477 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1478 {
1479 /* Redirect calls to the old version node to point to its new
1480 version. */
1481 cgraph_redirect_edge_callee (e, new_version);
1482 }
1483
1484 return new_version;
1485 }
1486
1487 /* Perform function versioning.
1488 Function versioning includes copying of the tree and
1489 a callgraph update (creating a new cgraph node and updating
1490 its callees and callers).
1491
1492 REDIRECT_CALLERS varray includes the edges to be redirected
1493 to the new version.
1494
1495 TREE_MAP is a mapping of tree nodes we want to replace with
1496 new ones (according to results of prior analysis).
1497 OLD_VERSION_NODE is the node that is versioned.
1498 It returns the new version's cgraph node. */
1499
1500 struct cgraph_node *
1501 cgraph_function_versioning (struct cgraph_node *old_version_node,
1502 VEC(cgraph_edge_p,heap) *redirect_callers,
1503 varray_type tree_map)
1504 {
1505 tree old_decl = old_version_node->decl;
1506 struct cgraph_node *new_version_node = NULL;
1507 tree new_decl;
1508
1509 if (!tree_versionable_function_p (old_decl))
1510 return NULL;
1511
1512 /* Make a new FUNCTION_DECL tree node for the
1513 new version. */
1514 new_decl = copy_node (old_decl);
1515
1516 /* Create the new version's call-graph node.
1517 and update the edges of the new node. */
1518 new_version_node =
1519 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1520 redirect_callers);
1521
1522 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1523 tree_function_versioning (old_decl, new_decl, tree_map, false);
1524 /* Update the call_expr on the edges to call the new version node. */
1525 update_call_expr (new_version_node);
1526
1527 /* Update the new version's properties.
1528 Make The new version visible only within this translation unit.
1529 ??? We cannot use COMDAT linkage because there is no
1530 ABI support for this. */
1531 DECL_EXTERNAL (new_version_node->decl) = 0;
1532 DECL_ONE_ONLY (new_version_node->decl) = 0;
1533 TREE_PUBLIC (new_version_node->decl) = 0;
1534 DECL_COMDAT (new_version_node->decl) = 0;
1535 new_version_node->local.externally_visible = 0;
1536 new_version_node->local.local = 1;
1537 new_version_node->lowered = true;
1538 return new_version_node;
1539 }
1540
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  Returns the clone node that takes
   over the saved body.

   NOTE(review): assumes NODE has at least one clone -- node->next_clone
   is dereferenced unconditionally below; confirm against callers.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  /* NODE must be the master node for its decl.  */
  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->next_clone;

  /* Detach the first clone from the master, give it its own decl, and
     make it findable through the decl hashtable.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  /* The saved copy is private to this translation unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* All remaining clones now share the first clone's new decl.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
1575
1576 #include "gt-cgraphunit.h"