ipa-reference.c: Do not include c-common.h, include splay-tree.h.
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48 In the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "intl.h"
130 #include "function.h"
131 #include "ipa-prop.h"
132 #include "gimple.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "output.h"
136 #include "coverage.h"
137
138 static void cgraph_expand_all_functions (void);
139 static void cgraph_mark_functions_to_output (void);
140 static void cgraph_expand_function (struct cgraph_node *);
141 static void cgraph_output_pending_asms (void);
142
143 static FILE *cgraph_dump_file;
144
145 /* A vector of FUNCTION_DECLs declared as static constructors. */
146 static GTY (()) VEC(tree, gc) *static_ctors;
147 /* A vector of FUNCTION_DECLs declared as static destructors. */
148 static GTY (()) VEC(tree, gc) *static_dtors;
149
150 /* When target does not have ctors and dtors, we call all constructor
151 and destructor by special initialization/destruction function
152 recognized by collect2.
153
154 When we are going to build this function, collect all constructors and
155 destructors and turn them into normal functions. */
156
157 static void
158 record_cdtor_fn (tree fndecl)
159 {
160 struct cgraph_node *node;
161 if (targetm.have_ctors_dtors
162 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
163 && !DECL_STATIC_DESTRUCTOR (fndecl)))
164 return;
165
166 if (DECL_STATIC_CONSTRUCTOR (fndecl))
167 {
168 VEC_safe_push (tree, gc, static_ctors, fndecl);
169 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
170 }
171 if (DECL_STATIC_DESTRUCTOR (fndecl))
172 {
173 VEC_safe_push (tree, gc, static_dtors, fndecl);
174 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
175 }
176 node = cgraph_node (fndecl);
177 node->local.disregard_inline_limits = 1;
178 cgraph_mark_reachable_node (node);
179 }
180
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.

   One synthetic 'I' (ctor) or 'D' (dtor) function is emitted per run
   of equal-priority entries; it simply calls each entry in order.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[i];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (!body)
	    /* First function of the batch determines its priority.  */
	    priority = p;
	  else if (p != priority)
	    /* Priority changed: close this batch; the outer loop
	       restarts at index I for the next one.  */
	    break;
	  append_to_statement_list (build_function_call_expr (fn, 0),
				    &body);
	  ++i;
	}
      while (i < len);
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
222
223 /* Comparison function for qsort. P1 and P2 are actually of type
224 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
225 used to determine the sort order. */
226
227 static int
228 compare_ctor (const void *p1, const void *p2)
229 {
230 tree f1;
231 tree f2;
232 int priority1;
233 int priority2;
234
235 f1 = *(const tree *)p1;
236 f2 = *(const tree *)p2;
237 priority1 = DECL_INIT_PRIORITY (f1);
238 priority2 = DECL_INIT_PRIORITY (f2);
239
240 if (priority1 < priority2)
241 return -1;
242 else if (priority1 > priority2)
243 return 1;
244 else
245 /* Ensure a stable sort. */
246 return (const tree *)p1 - (const tree *)p2;
247 }
248
249 /* Comparison function for qsort. P1 and P2 are actually of type
250 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
251 used to determine the sort order. */
252
253 static int
254 compare_dtor (const void *p1, const void *p2)
255 {
256 tree f1;
257 tree f2;
258 int priority1;
259 int priority2;
260
261 f1 = *(const tree *)p1;
262 f2 = *(const tree *)p2;
263 priority1 = DECL_FINI_PRIORITY (f1);
264 priority2 = DECL_FINI_PRIORITY (f2);
265
266 if (priority1 < priority2)
267 return -1;
268 else if (priority1 > priority2)
269 return 1;
270 else
271 /* Ensure a stable sort. */
272 return (const tree *)p1 - (const tree *)p2;
273 }
274
275 /* Generate functions to call static constructors and destructors
276 for targets that do not support .ctors/.dtors sections. These
277 functions have magic names which are detected by collect2. */
278
279 static void
280 cgraph_build_cdtor_fns (void)
281 {
282 if (!VEC_empty (tree, static_ctors))
283 {
284 gcc_assert (!targetm.have_ctors_dtors);
285 qsort (VEC_address (tree, static_ctors),
286 VEC_length (tree, static_ctors),
287 sizeof (tree),
288 compare_ctor);
289 build_cdtor (/*ctor_p=*/true,
290 VEC_address (tree, static_ctors),
291 VEC_length (tree, static_ctors));
292 VEC_truncate (tree, static_ctors, 0);
293 }
294
295 if (!VEC_empty (tree, static_dtors))
296 {
297 gcc_assert (!targetm.have_ctors_dtors);
298 qsort (VEC_address (tree, static_dtors),
299 VEC_length (tree, static_dtors),
300 sizeof (tree),
301 compare_dtor);
302 build_cdtor (/*ctor_p=*/false,
303 VEC_address (tree, static_dtors),
304 VEC_length (tree, static_dtors));
305 VEC_truncate (tree, static_dtors, 0);
306 }
307 }
308
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  Returns true when NODE must be kept; as a side effect
   may mark NODE externally visible (for main).  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* "main" is always an entry point of the program.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
     return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !node->origin))
      && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}
371
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Returns true when at least one function was finalized during
   construction (i.e. there is new work to output).  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      /* Pop the head of the queue.  */
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      /* How much catching-up a new function needs depends on how far
	 the compilation pipeline had advanced when it appeared.  */
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  /* Dominance info computed by the passes above is per-function;
	     release it before switching functions.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Notify IPA passes that registered function-insertion hooks.  */
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}
441
442 /* As an GCC extension we allow redefinition of the function. The
443 semantics when both copies of bodies differ is not well defined.
444 We replace the old body with new body so in unit at a time mode
445 we always use new body, while in normal mode we may end up with
446 old body inlined into some functions and new body expanded and
447 inlined in others.
448
449 ??? It may make more sense to use one body for inlining and other
450 body for expanding the function but this is difficult to do. */
451
452 static void
453 cgraph_reset_node (struct cgraph_node *node)
454 {
455 /* If node->process is set, then we have already begun whole-unit analysis.
456 This is *not* testing for whether we've already emitted the function.
457 That case can be sort-of legitimately seen with real function redefinition
458 errors. I would argue that the front end should never present us with
459 such a case, but don't enforce that for now. */
460 gcc_assert (!node->process);
461
462 /* Reset our data structures so we can analyze the function again. */
463 memset (&node->local, 0, sizeof (node->local));
464 memset (&node->global, 0, sizeof (node->global));
465 memset (&node->rtl, 0, sizeof (node->rtl));
466 node->analyzed = false;
467 node->local.redefined_extern_inline = true;
468 node->local.finalized = false;
469
470 cgraph_node_remove_callees (node);
471
472 /* We may need to re-queue the node for assembling in case
473 we already proceeded it and ignored as not needed or got
474 a re-declaration in IMA mode. */
475 if (node->reachable)
476 {
477 struct cgraph_node *n;
478
479 for (n = cgraph_nodes_queue; n; n = n->next_needed)
480 if (n == node)
481 break;
482 if (!n)
483 node->reachable = 0;
484 }
485 }
486
487 static void
488 cgraph_lower_function (struct cgraph_node *node)
489 {
490 if (node->lowered)
491 return;
492 tree_lowering_passes (node->decl);
493 node->lowered = true;
494 }
495
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* Redefinition (e.g. GNU extern inline): discard what we derived
     from the previous body and start over.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A body that already has a CFG was lowered by the front end.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  record_cdtor_fn (node->decl);
  /* Lower nested functions into the parent; none may remain after.  */
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Collecting is only safe when the caller holds no GC-unprotected
     state (see NESTED above).  */
  if (!nested)
    ggc_collect ();
}
538
539 /* C99 extern inline keywords allow changing of declaration after function
540 has been finalized. We need to re-decide if we want to mark the function as
541 needed then. */
542
543 void
544 cgraph_mark_if_needed (tree decl)
545 {
546 struct cgraph_node *node = cgraph_node (decl);
547 if (node->local.finalized && decide_is_function_needed (node, decl))
548 cgraph_mark_needed_node (node);
549 }
550
/* Verify cgraph nodes of given cgraph node.  Checks edge invariants
   (aux fields clear, non-negative counts, sane frequencies, consistent
   inlined_to links) and, for analyzed nodes with a body, that call
   edges match the GIMPLE call statements in the CFG one-to-one.
   Aborts via internal_error on any inconsistency.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct cgraph_node *main_clone;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  /* After errors the graph may legitimately be inconsistent.  */
  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs correct cfun */
  set_cfun (this_cfun);
  /* Each callee edge's aux must start clear; the CFG walk below uses it
     to mark edges matched to a call statement.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       cgraph_node_name (e->caller), cgraph_node_name (e->callee));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
	{
	  error ("caller edge count is negative");
	  error_found = true;
	}
      if (e->frequency < 0)
	{
	  error ("caller edge frequency is negative");
	  error_found = true;
	}
      if (e->frequency > CGRAPH_FREQ_MAX)
	{
	  error ("caller edge frequency is too large");
	  error_found = true;
	}
      if (!e->inline_failed)
	{
	  /* An inlined node must point to the function it was ultimately
	     inlined into, and may have only a single caller.  */
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  /* NODE should be reachable via the clone chain of its decl's node.  */
  for (main_clone = cgraph_node (node->decl); main_clone;
       main_clone = main_clone->next_clone)
    if (main_clone == node)
      break;
  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->analyzed
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  /* NOTE(review): visited_nodes is created and destroyed here but
	     never consulted in the walk below — possibly vestigial.  */
	  struct pointer_set_t *visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
	      {
		gimple stmt = gsi_stmt (gsi);
		tree decl;
		if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    if (e)
		      {
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_gimple_stmt (stmt);
			    error_found = true;
			  }
			if (e->callee->decl != cgraph_node (decl)->decl
			    && e->inline_failed)
			  {
			    error ("edge points to wrong declaration:");
			    debug_tree (e->callee->decl);
			    fprintf (stderr," Instead of:");
			    debug_tree (decl);
			  }
			e->aux = (void *)1;
		      }
		    else
		      {
			error ("missing callgraph edge for call stmt:");
			debug_gimple_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Every callee edge must have been matched to a call statement
	 above unless it is an indirect call; clear the aux markers.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux && !e->indirect_call)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     cgraph_node_name (e->caller),
		     cgraph_node_name (e->callee));
	      debug_gimple_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}
715
716 /* Verify whole cgraph structure. */
717 void
718 verify_cgraph (void)
719 {
720 struct cgraph_node *node;
721
722 if (sorrycount || errorcount)
723 return;
724
725 for (node = cgraph_nodes; node; node = node->next)
726 verify_cgraph_node (node);
727 }
728
729 /* Output all asm statements we have stored up to be output. */
730
731 static void
732 cgraph_output_pending_asms (void)
733 {
734 struct cgraph_asm_node *can;
735
736 if (errorcount || sorrycount)
737 return;
738
739 for (can = cgraph_asm_nodes; can; can = can->next)
740 assemble_asm (can->asm_str);
741 cgraph_asm_nodes = NULL;
742 }
743
744 /* Analyze the function scheduled to be output. */
745 void
746 cgraph_analyze_function (struct cgraph_node *node)
747 {
748 tree decl = node->decl;
749
750 current_function_decl = decl;
751 push_cfun (DECL_STRUCT_FUNCTION (decl));
752 cgraph_lower_function (node);
753 node->analyzed = true;
754
755 pop_cfun ();
756 current_function_decl = NULL;
757 }
758
759 /* Look for externally_visible and used attributes and mark cgraph nodes
760 accordingly.
761
762 We cannot mark the nodes at the point the attributes are processed (in
763 handle_*_attribute) because the copy of the declarations available at that
764 point may not be canonical. For example, in:
765
766 void f();
767 void f() __attribute__((used));
768
769 the declaration we see in handle_used_attribute will be the second
770 declaration -- but the front end will subsequently merge that declaration
771 with the original declaration and discard the second declaration.
772
773 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
774
775 void f() {}
776 void f() __attribute__((externally_visible));
777
778 is valid.
779
780 So, we walk the nodes at the end of the translation unit, applying the
781 attributes at that point. */
782
783 static void
784 process_function_and_variable_attributes (struct cgraph_node *first,
785 struct varpool_node *first_var)
786 {
787 struct cgraph_node *node;
788 struct varpool_node *vnode;
789
790 for (node = cgraph_nodes; node != first; node = node->next)
791 {
792 tree decl = node->decl;
793 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
794 {
795 mark_decl_referenced (decl);
796 if (node->local.finalized)
797 cgraph_mark_needed_node (node);
798 }
799 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
800 {
801 if (! TREE_PUBLIC (node->decl))
802 warning (OPT_Wattributes,
803 "%J%<externally_visible%> attribute have effect only on public objects",
804 node->decl);
805 else
806 {
807 if (node->local.finalized)
808 cgraph_mark_needed_node (node);
809 node->local.externally_visible = true;
810 }
811 }
812 }
813 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
814 {
815 tree decl = vnode->decl;
816 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
817 {
818 mark_decl_referenced (decl);
819 if (vnode->finalized)
820 varpool_mark_needed_node (vnode);
821 }
822 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
823 {
824 if (! TREE_PUBLIC (vnode->decl))
825 warning (OPT_Wattributes,
826 "%J%<externally_visible%> attribute have effect only on public objects",
827 vnode->decl);
828 else
829 {
830 if (vnode->finalized)
831 varpool_mark_needed_node (vnode);
832 vnode->externally_visible = true;
833 }
834 }
835 }
836 }
837
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.
   On exit, bodies of unreachable functions have been released.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  /* Apply deferred attribute handling and pick up pending variables
     before scanning the queue.  */
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head; analysis below may push further nodes.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (gimple_body (decl));

      cgraph_analyze_function (node);

      /* Everything this function calls becomes reachable too.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body.  The DECL_INITIAL() of that
	 abstract function declaration will be later needed to output debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Remove unreachable functions (and release their bodies) to
     conserve memory.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
960
/* Analyze the whole compilation unit once it is parsed completely.
   Front-end entry point; drives cgraph_analyze_functions under the
   TV_CGRAPH timer.  */

void
cgraph_finalize_compilation_unit (void)
{
  /* Nothing to do if parsing already failed.  */
  if (errorcount || sorrycount)
    return;

  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_analyze_functions ();
  timevar_pop (TV_CGRAPH);
}
981
/* Figure out what functions we want to assemble.  Sets node->process on
   every analyzed, non-inlined-away function that still needs output and
   verifies that everything else was already reclaimed.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process);

      /* Look for at least one caller that did NOT inline this node;
	 E is non-NULL afterwards iff such a caller exists.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->process = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
1031
/* Expand function specified by NODE.  Generates assembly for the body,
   then releases the body and all call edges to conserve memory.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  if (lang_hooks.callgraph.emit_associated_thunks)
    lang_hooks.callgraph.emit_associated_thunks (decl);
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1063
1064 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1065
1066 bool
1067 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1068 {
1069 *reason = e->inline_failed;
1070 return !e->inline_failed;
1071 }
1072
1073
1074
1075 /* Expand all functions that must be output.
1076
1077 Attempt to topologically sort the nodes so function is output when
1078 all called functions are already assembled to allow data to be
1079 propagated across the callgraph. Use a stack to get smaller distance
1080 between a function and its callees (later we may choose to use a more
1081 sophisticated algorithm for function reordering; we will likely want
1082 to use subsections to make the output functions appear in top-down
1083 order). */
1084
1085 static void
1086 cgraph_expand_all_functions (void)
1087 {
1088 struct cgraph_node *node;
1089 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1090 int order_pos, new_order_pos = 0;
1091 int i;
1092
1093 order_pos = cgraph_postorder (order);
1094 gcc_assert (order_pos == cgraph_n_nodes);
1095
1096 /* Garbage collector may remove inline clones we eliminate during
1097 optimization. So we must be sure to not reference them. */
1098 for (i = 0; i < order_pos; i++)
1099 if (order[i]->process)
1100 order[new_order_pos++] = order[i];
1101
1102 for (i = new_order_pos - 1; i >= 0; i--)
1103 {
1104 node = order[i];
1105 if (node->process)
1106 {
1107 gcc_assert (node->reachable);
1108 node->process = 0;
1109 cgraph_expand_function (node);
1110 }
1111 }
1112 cgraph_process_new_functions ();
1113
1114 free (order);
1115
1116 }
1117
/* This is used to sort the node types by the cgraph order number.  */

struct cgraph_order_sort
{
  /* Discriminator saying which member of U below is valid;
     ORDER_UNDEFINED marks slots with no entity at that order.  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  union
  {
    struct cgraph_node *f;	/* Valid when KIND == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when KIND == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when KIND == ORDER_ASM.  */
  } u;
};
1130
1131 /* Output all functions, variables, and asm statements in the order
1132 according to their order fields, which is the order in which they
1133 appeared in the file. This implements -fno-toplevel-reorder. In
1134 this mode we may output functions and variables which don't really
1135 need to be output. */
1136
1137 static void
1138 cgraph_output_in_order (void)
1139 {
1140 int max;
1141 size_t size;
1142 struct cgraph_order_sort *nodes;
1143 int i;
1144 struct cgraph_node *pf;
1145 struct varpool_node *pv;
1146 struct cgraph_asm_node *pa;
1147
1148 max = cgraph_order;
1149 size = max * sizeof (struct cgraph_order_sort);
1150 nodes = (struct cgraph_order_sort *) alloca (size);
1151 memset (nodes, 0, size);
1152
1153 varpool_analyze_pending_decls ();
1154
1155 for (pf = cgraph_nodes; pf; pf = pf->next)
1156 {
1157 if (pf->process)
1158 {
1159 i = pf->order;
1160 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1161 nodes[i].kind = ORDER_FUNCTION;
1162 nodes[i].u.f = pf;
1163 }
1164 }
1165
1166 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1167 {
1168 i = pv->order;
1169 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1170 nodes[i].kind = ORDER_VAR;
1171 nodes[i].u.v = pv;
1172 }
1173
1174 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1175 {
1176 i = pa->order;
1177 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1178 nodes[i].kind = ORDER_ASM;
1179 nodes[i].u.a = pa;
1180 }
1181
1182 /* In toplevel reorder mode we output all statics; mark them as needed. */
1183 for (i = 0; i < max; ++i)
1184 {
1185 if (nodes[i].kind == ORDER_VAR)
1186 {
1187 varpool_mark_needed_node (nodes[i].u.v);
1188 }
1189 }
1190 varpool_empty_needed_queue ();
1191
1192 for (i = 0; i < max; ++i)
1193 {
1194 switch (nodes[i].kind)
1195 {
1196 case ORDER_FUNCTION:
1197 nodes[i].u.f->process = 0;
1198 cgraph_expand_function (nodes[i].u.f);
1199 break;
1200
1201 case ORDER_VAR:
1202 varpool_assemble_decl (nodes[i].u.v);
1203 break;
1204
1205 case ORDER_ASM:
1206 assemble_asm (nodes[i].u.a->asm_str);
1207 break;
1208
1209 case ORDER_UNDEFINED:
1210 break;
1211
1212 default:
1213 gcc_unreachable ();
1214 }
1215 }
1216
1217 cgraph_asm_nodes = NULL;
1218 }
1219
1220 /* Return true when function body of DECL still needs to be kept around
1221 for later re-use. */
1222 bool
1223 cgraph_preserve_function_body_p (tree decl)
1224 {
1225 struct cgraph_node *node;
1226
1227 gcc_assert (cgraph_global_info_ready);
1228 /* Look if there is any clone around. */
1229 for (node = cgraph_node (decl); node; node = node->next_clone)
1230 if (node->global.inlined_to)
1231 return true;
1232 return false;
1233 }
1234
/* Run the interprocedural (IPA) pass list over the whole call graph.  */

static void
ipa_passes (void)
{
  /* IPA passes operate on the unit as a whole, not on any single
     function, so clear the per-function state first.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);

  /* Generate coverage variables and constructors.  */
  coverage_finish ();

  /* Process new functions added.  Clear the per-function state again
     since the passes above may have left it set.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  cgraph_process_new_functions ();

  bitmap_obstack_release (NULL);
}
1254
/* Perform simple optimizations based on callgraph.  This is the main
   driver: it runs the IPA passes, decides what to emit, and expands
   everything to assembly.  The sequencing below is significant.  */

void
cgraph_optimize (void)
{
  /* Nothing to do once errors were reported.  */
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();
  cgraph_analyze_functions ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Decide which functions must be written out.  */
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* With -fno-toplevel-reorder all entities are emitted interleaved in
     source order; otherwise asms, functions and variables are emitted
     in separate groups.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  /* Expansion may have created new functions; process them before
     declaring the graph finished.  */
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
1358 /* Generate and emit a static constructor or destructor. WHICH must
1359 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1360 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1361 initialization priority for this constructor or destructor. */
1362
1363 void
1364 cgraph_build_static_cdtor (char which, tree body, int priority)
1365 {
1366 static int counter = 0;
1367 char which_buf[16];
1368 tree decl, name, resdecl;
1369
1370 /* The priority is encoded in the constructor or destructor name.
1371 collect2 will sort the names and arrange that they are called at
1372 program startup. */
1373 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1374 name = get_file_function_name (which_buf);
1375
1376 decl = build_decl (FUNCTION_DECL, name,
1377 build_function_type (void_type_node, void_list_node));
1378 current_function_decl = decl;
1379
1380 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1381 DECL_ARTIFICIAL (resdecl) = 1;
1382 DECL_RESULT (decl) = resdecl;
1383 DECL_CONTEXT (resdecl) = decl;
1384
1385 allocate_struct_function (decl, false);
1386
1387 TREE_STATIC (decl) = 1;
1388 TREE_USED (decl) = 1;
1389 DECL_ARTIFICIAL (decl) = 1;
1390 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1391 DECL_SAVED_TREE (decl) = body;
1392 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1393 DECL_UNINLINABLE (decl) = 1;
1394
1395 DECL_INITIAL (decl) = make_node (BLOCK);
1396 TREE_USED (DECL_INITIAL (decl)) = 1;
1397
1398 DECL_SOURCE_LOCATION (decl) = input_location;
1399 cfun->function_end_locus = input_location;
1400
1401 switch (which)
1402 {
1403 case 'I':
1404 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1405 decl_init_priority_insert (decl, priority);
1406 break;
1407 case 'D':
1408 DECL_STATIC_DESTRUCTOR (decl) = 1;
1409 decl_fini_priority_insert (decl, priority);
1410 break;
1411 default:
1412 gcc_unreachable ();
1413 }
1414
1415 gimplify_function_tree (decl);
1416
1417 cgraph_add_new_function (decl, false);
1418 cgraph_mark_needed_node (cgraph_node (decl));
1419 set_cfun (NULL);
1420 }
1421
/* Initialize the callgraph module: open the cgraph dump file if
   dumping was requested (NULL otherwise).  */

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1427
1428 /* The edges representing the callers of the NEW_VERSION node were
1429 fixed by cgraph_function_versioning (), now the call_expr in their
1430 respective tree code should be updated to call the NEW_VERSION. */
1431
1432 static void
1433 update_call_expr (struct cgraph_node *new_version)
1434 {
1435 struct cgraph_edge *e;
1436
1437 gcc_assert (new_version);
1438
1439 /* Update the call expr on the edges to call the new version. */
1440 for (e = new_version->callers; e; e = e->next_caller)
1441 {
1442 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1443 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1444 /* Update EH information too, just in case. */
1445 if (!stmt_could_throw_p (e->call_stmt)
1446 && lookup_stmt_eh_region_fn (inner_function, e->call_stmt))
1447 remove_stmt_from_eh_region_fn (inner_function, e->call_stmt);
1448 }
1449 }
1450
1451
1452 /* Create a new cgraph node which is the new version of
1453 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1454 edges which should be redirected to point to
1455 NEW_VERSION. ALL the callees edges of OLD_VERSION
1456 are cloned to the new version node. Return the new
1457 version node. */
1458
1459 static struct cgraph_node *
1460 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1461 tree new_decl,
1462 VEC(cgraph_edge_p,heap) *redirect_callers)
1463 {
1464 struct cgraph_node *new_version;
1465 struct cgraph_edge *e, *new_e;
1466 struct cgraph_edge *next_callee;
1467 unsigned i;
1468
1469 gcc_assert (old_version);
1470
1471 new_version = cgraph_node (new_decl);
1472
1473 new_version->analyzed = true;
1474 new_version->local = old_version->local;
1475 new_version->global = old_version->global;
1476 new_version->rtl = new_version->rtl;
1477 new_version->reachable = true;
1478 new_version->count = old_version->count;
1479
1480 /* Clone the old node callees. Recursive calls are
1481 also cloned. */
1482 for (e = old_version->callees;e; e=e->next_callee)
1483 {
1484 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1485 e->loop_nest, true);
1486 new_e->count = e->count;
1487 }
1488 /* Fix recursive calls.
1489 If OLD_VERSION has a recursive call after the
1490 previous edge cloning, the new version will have an edge
1491 pointing to the old version, which is wrong;
1492 Redirect it to point to the new version. */
1493 for (e = new_version->callees ; e; e = next_callee)
1494 {
1495 next_callee = e->next_callee;
1496 if (e->callee == old_version)
1497 cgraph_redirect_edge_callee (e, new_version);
1498
1499 if (!next_callee)
1500 break;
1501 }
1502 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1503 {
1504 /* Redirect calls to the old version node to point to its new
1505 version. */
1506 cgraph_redirect_edge_callee (e, new_version);
1507 }
1508
1509 return new_version;
1510 }
1511
1512 /* Perform function versioning.
1513 Function versioning includes copying of the tree and
1514 a callgraph update (creating a new cgraph node and updating
1515 its callees and callers).
1516
1517 REDIRECT_CALLERS varray includes the edges to be redirected
1518 to the new version.
1519
1520 TREE_MAP is a mapping of tree nodes we want to replace with
1521 new ones (according to results of prior analysis).
1522 OLD_VERSION_NODE is the node that is versioned.
1523 It returns the new version's cgraph node.
1524 ARGS_TO_SKIP lists arguments to be omitted from functions
1525 */
1526
1527 struct cgraph_node *
1528 cgraph_function_versioning (struct cgraph_node *old_version_node,
1529 VEC(cgraph_edge_p,heap) *redirect_callers,
1530 varray_type tree_map,
1531 bitmap args_to_skip)
1532 {
1533 tree old_decl = old_version_node->decl;
1534 struct cgraph_node *new_version_node = NULL;
1535 tree new_decl;
1536
1537 if (!tree_versionable_function_p (old_decl))
1538 return NULL;
1539
1540 /* Make a new FUNCTION_DECL tree node for the
1541 new version. */
1542 if (!args_to_skip)
1543 new_decl = copy_node (old_decl);
1544 else
1545 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1546
1547 /* Create the new version's call-graph node.
1548 and update the edges of the new node. */
1549 new_version_node =
1550 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1551 redirect_callers);
1552
1553 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1554 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
1555
1556 /* Update the new version's properties.
1557 Make The new version visible only within this translation unit. Make sure
1558 that is not weak also.
1559 ??? We cannot use COMDAT linkage because there is no
1560 ABI support for this. */
1561 DECL_EXTERNAL (new_version_node->decl) = 0;
1562 DECL_ONE_ONLY (new_version_node->decl) = 0;
1563 TREE_PUBLIC (new_version_node->decl) = 0;
1564 DECL_COMDAT (new_version_node->decl) = 0;
1565 DECL_WEAK (new_version_node->decl) = 0;
1566 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1567 new_version_node->local.externally_visible = 0;
1568 new_version_node->local.local = 1;
1569 new_version_node->lowered = true;
1570
1571 /* Update the call_expr on the edges to call the new version node. */
1572 update_call_expr (new_version_node);
1573
1574 cgraph_call_function_insertion_hooks (new_version_node);
1575 return new_version_node;
1576 }
1577
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  /* NODE must be the master node for its decl.  */
  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* NOTE(review): assumes at least one inline clone exists --
     FIRST_CLONE is dereferenced below without a NULL check; confirm
     callers guarantee node->next_clone is non-NULL.  */
  first_clone = node->next_clone;

  /* Detach the first clone from NODE's clone list and give it its own
     FUNCTION_DECL copy; it becomes the new master for the remaining
     clones.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  /* The saved body is strictly local to this translation unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* Point the remaining clones at the new master's decl.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
1612
1613 #include "gt-cgraphunit.h"