re PR middle-end/37293 (r139762 breaks libstdc++ build on darwin)
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
     In the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
     callgraph and varpool nodes referenced by them as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
      that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
      really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "c-common.h"
130 #include "intl.h"
131 #include "function.h"
132 #include "ipa-prop.h"
133 #include "gimple.h"
134 #include "tree-iterator.h"
135 #include "tree-pass.h"
136 #include "output.h"
137
138 static void cgraph_expand_all_functions (void);
139 static void cgraph_mark_functions_to_output (void);
140 static void cgraph_expand_function (struct cgraph_node *);
141 static void cgraph_output_pending_asms (void);
142
143 static FILE *cgraph_dump_file;
144
145 /* A vector of FUNCTION_DECLs declared as static constructors. */
146 static GTY (()) VEC(tree, gc) *static_ctors;
147 /* A vector of FUNCTION_DECLs declared as static destructors. */
148 static GTY (()) VEC(tree, gc) *static_dtors;
149
150 /* When target does not have ctors and dtors, we call all constructor
151 and destructor by special initialization/destruction function
152 recognized by collect2.
153
154 When we are going to build this function, collect all constructors and
155 destructors and turn them into normal functions. */
156
157 static void
158 record_cdtor_fn (tree fndecl)
159 {
160 struct cgraph_node *node;
161 if (targetm.have_ctors_dtors
162 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
163 && !DECL_STATIC_DESTRUCTOR (fndecl)))
164 return;
165
166 if (DECL_STATIC_CONSTRUCTOR (fndecl))
167 {
168 VEC_safe_push (tree, gc, static_ctors, fndecl);
169 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
170 }
171 if (DECL_STATIC_DESTRUCTOR (fndecl))
172 {
173 VEC_safe_push (tree, gc, static_dtors, fndecl);
174 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
175 }
176 node = cgraph_node (fndecl);
177 node->local.disregard_inline_limits = 1;
178 cgraph_mark_reachable_node (node);
179 }
180
/* Define global constructors/destructor functions for the CDTORS, of
   which they are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      do
	{
	  priority_type p;
	  fn = cdtors[i];
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  /* The first function of a batch fixes the batch's priority;
	     stop (without consuming FN) at the first function whose
	     priority differs — it starts the next batch.  */
	  if (!body)
	    priority = p;
	  else if (p != priority)
	    break;
	  /* Chain a call to FN onto the wrapper's statement list.  */
	  append_to_statement_list (build_function_call_expr (fn, 0),
				    &body);
	  ++i;
	}
      while (i < len);
      /* Each iteration of the outer loop consumes at least one cdtor,
	 so the batch body is never empty.  */
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
222
223 /* Comparison function for qsort. P1 and P2 are actually of type
224 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
225 used to determine the sort order. */
226
227 static int
228 compare_ctor (const void *p1, const void *p2)
229 {
230 tree f1;
231 tree f2;
232 int priority1;
233 int priority2;
234
235 f1 = *(const tree *)p1;
236 f2 = *(const tree *)p2;
237 priority1 = DECL_INIT_PRIORITY (f1);
238 priority2 = DECL_INIT_PRIORITY (f2);
239
240 if (priority1 < priority2)
241 return -1;
242 else if (priority1 > priority2)
243 return 1;
244 else
245 /* Ensure a stable sort. */
246 return (const tree *)p1 - (const tree *)p2;
247 }
248
249 /* Comparison function for qsort. P1 and P2 are actually of type
250 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
251 used to determine the sort order. */
252
253 static int
254 compare_dtor (const void *p1, const void *p2)
255 {
256 tree f1;
257 tree f2;
258 int priority1;
259 int priority2;
260
261 f1 = *(const tree *)p1;
262 f2 = *(const tree *)p2;
263 priority1 = DECL_FINI_PRIORITY (f1);
264 priority2 = DECL_FINI_PRIORITY (f2);
265
266 if (priority1 < priority2)
267 return -1;
268 else if (priority1 > priority2)
269 return 1;
270 else
271 /* Ensure a stable sort. */
272 return (const tree *)p1 - (const tree *)p2;
273 }
274
275 /* Generate functions to call static constructors and destructors
276 for targets that do not support .ctors/.dtors sections. These
277 functions have magic names which are detected by collect2. */
278
279 static void
280 cgraph_build_cdtor_fns (void)
281 {
282 if (!VEC_empty (tree, static_ctors))
283 {
284 gcc_assert (!targetm.have_ctors_dtors);
285 qsort (VEC_address (tree, static_ctors),
286 VEC_length (tree, static_ctors),
287 sizeof (tree),
288 compare_ctor);
289 build_cdtor (/*ctor_p=*/true,
290 VEC_address (tree, static_ctors),
291 VEC_length (tree, static_ctors));
292 VEC_truncate (tree, static_ctors, 0);
293 }
294
295 if (!VEC_empty (tree, static_dtors))
296 {
297 gcc_assert (!targetm.have_ctors_dtors);
298 qsort (VEC_address (tree, static_dtors),
299 VEC_length (tree, static_dtors),
300 sizeof (tree),
301 compare_dtor);
302 build_cdtor (/*ctor_p=*/false,
303 VEC_address (tree, static_dtors),
304 VEC_length (tree, static_dtors));
305 VEC_truncate (tree, static_dtors, 0);
306 }
307 }
308
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  /* A public "main" is always a program entry point.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These was optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !node->origin))
      && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}
371
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Returns true when at least one function was processed in the
   CGRAPH_STATE_CONSTRUCTION state.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as its being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      /* How much processing the new function needs depends on how far
	 compilation of the whole unit has progressed.  */
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->output = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Let registered IPA passes and plugins see the new function.  */
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}
441
442 /* As an GCC extension we allow redefinition of the function. The
443 semantics when both copies of bodies differ is not well defined.
444 We replace the old body with new body so in unit at a time mode
445 we always use new body, while in normal mode we may end up with
446 old body inlined into some functions and new body expanded and
447 inlined in others.
448
449 ??? It may make more sense to use one body for inlining and other
450 body for expanding the function but this is difficult to do. */
451
452 static void
453 cgraph_reset_node (struct cgraph_node *node)
454 {
455 /* If node->output is set, then we have already begun whole-unit analysis.
456 This is *not* testing for whether we've already emitted the function.
457 That case can be sort-of legitimately seen with real function redefinition
458 errors. I would argue that the front end should never present us with
459 such a case, but don't enforce that for now. */
460 gcc_assert (!node->output);
461
462 /* Reset our data structures so we can analyze the function again. */
463 memset (&node->local, 0, sizeof (node->local));
464 memset (&node->global, 0, sizeof (node->global));
465 memset (&node->rtl, 0, sizeof (node->rtl));
466 node->analyzed = false;
467 node->local.redefined_extern_inline = true;
468 node->local.finalized = false;
469
470 cgraph_node_remove_callees (node);
471
472 /* We may need to re-queue the node for assembling in case
473 we already proceeded it and ignored as not needed or got
474 a re-declaration in IMA mode. */
475 if (node->reachable)
476 {
477 struct cgraph_node *n;
478
479 for (n = cgraph_nodes_queue; n; n = n->next_needed)
480 if (n == node)
481 break;
482 if (!n)
483 node->reachable = 0;
484 }
485 }
486
487 static void
488 cgraph_lower_function (struct cgraph_node *node)
489 {
490 if (node->lowered)
491 return;
492 tree_lowering_passes (node->decl);
493 node->lowered = true;
494 }
495
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* Redefinition of an already finalized body (GCC extension): drop the
     old analysis results first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* If the front end already built a CFG, the body is already low.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  record_cdtor_fn (node->decl);
  /* Nested functions are flattened out now; none may remain after.  */
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Collecting is only safe when the caller holds no loose tree
     references (see NESTED above).  */
  if (!nested)
    ggc_collect ();
}
538
539 /* C99 extern inline keywords allow changing of declaration after function
540 has been finalized. We need to re-decide if we want to mark the function as
541 needed then. */
542
543 void
544 cgraph_mark_if_needed (tree decl)
545 {
546 struct cgraph_node *node = cgraph_node (decl);
547 if (node->local.finalized && decide_is_function_needed (node, decl))
548 cgraph_mark_needed_node (node);
549 }
550
551 /* Verify cgraph nodes of given cgraph node. */
552 void
553 verify_cgraph_node (struct cgraph_node *node)
554 {
555 struct cgraph_edge *e;
556 struct cgraph_node *main_clone;
557 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
558 struct function *saved_cfun = cfun;
559 basic_block this_block;
560 gimple_stmt_iterator gsi;
561 bool error_found = false;
562
563 if (errorcount || sorrycount)
564 return;
565
566 timevar_push (TV_CGRAPH_VERIFY);
567 /* debug_generic_stmt needs correct cfun */
568 set_cfun (this_cfun);
569 for (e = node->callees; e; e = e->next_callee)
570 if (e->aux)
571 {
572 error ("aux field set for edge %s->%s",
573 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
574 error_found = true;
575 }
576 if (node->count < 0)
577 {
578 error ("Execution count is negative");
579 error_found = true;
580 }
581 for (e = node->callers; e; e = e->next_caller)
582 {
583 if (e->count < 0)
584 {
585 error ("caller edge count is negative");
586 error_found = true;
587 }
588 if (e->frequency < 0)
589 {
590 error ("caller edge frequency is negative");
591 error_found = true;
592 }
593 if (e->frequency > CGRAPH_FREQ_MAX)
594 {
595 error ("caller edge frequency is too large");
596 error_found = true;
597 }
598 if (!e->inline_failed)
599 {
600 if (node->global.inlined_to
601 != (e->caller->global.inlined_to
602 ? e->caller->global.inlined_to : e->caller))
603 {
604 error ("inlined_to pointer is wrong");
605 error_found = true;
606 }
607 if (node->callers->next_caller)
608 {
609 error ("multiple inline callers");
610 error_found = true;
611 }
612 }
613 else
614 if (node->global.inlined_to)
615 {
616 error ("inlined_to pointer set for noninline callers");
617 error_found = true;
618 }
619 }
620 if (!node->callers && node->global.inlined_to)
621 {
622 error ("inlined_to pointer is set but no predecessors found");
623 error_found = true;
624 }
625 if (node->global.inlined_to == node)
626 {
627 error ("inlined_to pointer refers to itself");
628 error_found = true;
629 }
630
631 for (main_clone = cgraph_node (node->decl); main_clone;
632 main_clone = main_clone->next_clone)
633 if (main_clone == node)
634 break;
635 if (!cgraph_node (node->decl))
636 {
637 error ("node not found in cgraph_hash");
638 error_found = true;
639 }
640
641 if (node->analyzed
642 && !TREE_ASM_WRITTEN (node->decl)
643 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
644 {
645 if (this_cfun->cfg)
646 {
647 /* The nodes we're interested in are never shared, so walk
648 the tree ignoring duplicates. */
649 struct pointer_set_t *visited_nodes = pointer_set_create ();
650 /* Reach the trees by walking over the CFG, and note the
651 enclosing basic-blocks in the call edges. */
652 FOR_EACH_BB_FN (this_block, this_cfun)
653 for (gsi = gsi_start_bb (this_block);
654 !gsi_end_p (gsi);
655 gsi_next (&gsi))
656 {
657 gimple stmt = gsi_stmt (gsi);
658 tree decl;
659 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
660 {
661 struct cgraph_edge *e = cgraph_edge (node, stmt);
662 if (e)
663 {
664 if (e->aux)
665 {
666 error ("shared call_stmt:");
667 debug_gimple_stmt (stmt);
668 error_found = true;
669 }
670 if (e->callee->decl != cgraph_node (decl)->decl
671 && e->inline_failed)
672 {
673 error ("edge points to wrong declaration:");
674 debug_tree (e->callee->decl);
675 fprintf (stderr," Instead of:");
676 debug_tree (decl);
677 }
678 e->aux = (void *)1;
679 }
680 else
681 {
682 error ("missing callgraph edge for call stmt:");
683 debug_gimple_stmt (stmt);
684 error_found = true;
685 }
686 }
687 }
688 pointer_set_destroy (visited_nodes);
689 }
690 else
691 /* No CFG available?! */
692 gcc_unreachable ();
693
694 for (e = node->callees; e; e = e->next_callee)
695 {
696 if (!e->aux && !e->indirect_call)
697 {
698 error ("edge %s->%s has no corresponding call_stmt",
699 cgraph_node_name (e->caller),
700 cgraph_node_name (e->callee));
701 debug_gimple_stmt (e->call_stmt);
702 error_found = true;
703 }
704 e->aux = 0;
705 }
706 }
707 if (error_found)
708 {
709 dump_cgraph_node (stderr, node);
710 internal_error ("verify_cgraph_node failed");
711 }
712 set_cfun (saved_cfun);
713 timevar_pop (TV_CGRAPH_VERIFY);
714 }
715
716 /* Verify whole cgraph structure. */
717 void
718 verify_cgraph (void)
719 {
720 struct cgraph_node *node;
721
722 if (sorrycount || errorcount)
723 return;
724
725 for (node = cgraph_nodes; node; node = node->next)
726 verify_cgraph_node (node);
727 }
728
729 /* Output all asm statements we have stored up to be output. */
730
731 static void
732 cgraph_output_pending_asms (void)
733 {
734 struct cgraph_asm_node *can;
735
736 if (errorcount || sorrycount)
737 return;
738
739 for (can = cgraph_asm_nodes; can; can = can->next)
740 assemble_asm (can->asm_str);
741 cgraph_asm_nodes = NULL;
742 }
743
744 /* Analyze the function scheduled to be output. */
745 void
746 cgraph_analyze_function (struct cgraph_node *node)
747 {
748 tree decl = node->decl;
749
750 current_function_decl = decl;
751 push_cfun (DECL_STRUCT_FUNCTION (decl));
752 cgraph_lower_function (node);
753 node->analyzed = true;
754
755 pop_cfun ();
756 current_function_decl = NULL;
757 }
758
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

   void f();
   void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

   void f() {}
   void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Walk functions added since FIRST (the lists are prepended to, so
     everything before FIRST is new).  */
  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
	{
	  mark_decl_referenced (decl);
	  if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning (OPT_Wattributes,
		     "%J%<externally_visible%> attribute have effect only on public objects",
		     node->decl);
	  else
	    {
	      if (node->local.finalized)
		cgraph_mark_needed_node (node);
	      node->local.externally_visible = true;
	    }
	}
    }
  /* The same treatment for variables added since FIRST_VAR.  */
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
	{
	  mark_decl_referenced (decl);
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning (OPT_Wattributes,
		     "%J%<externally_visible%> attribute have effect only on public objects",
		     vnode->decl);
	  else
	    {
	      if (vnode->finalized)
		varpool_mark_needed_node (vnode);
	      vnode->externally_visible = true;
	    }
	}
    }
}
837
/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
   each reachable functions) and build cgraph.
   The function can be called multiple times after inserting new nodes
   into beginning of queue.  Just the new part of queue is re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the worklist.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (gimple_body (decl));

      cgraph_analyze_function (node);

      /* Analysis created the call edges; pull each new callee onto the
	 worklist.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release bodies and nodes of everything that did not become
     reachable during the fixpoint above.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
951
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point called by front ends; does nothing after errors.  */

void
cgraph_finalize_compilation_unit (void)
{
  if (errorcount || sorrycount)
    return;

  /* Resolve pending alias pairs before building the callgraph.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_analyze_functions ();
  timevar_pop (TV_CGRAPH);
}
/* Figure out what functions we want to assemble: set node->output on
   every analyzed, not-yet-written, non-external function that is either
   needed or reachable through a not-inlined call.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->output);

      /* Look for at least one caller that did not inline this node.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->output = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
1020
/* Expand function specified by NODE: run the RTL part of compilation
   on its body and release the GIMPLE body afterwards.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  if (lang_hooks.callgraph.emit_associated_thunks)
    lang_hooks.callgraph.emit_associated_thunks (decl);
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  /* The body is no longer needed once assembly was emitted.  */
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1051
1052 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1053
1054 bool
1055 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1056 {
1057 *reason = e->inline_failed;
1058 return !e->inline_failed;
1059 }
1060
1061
1062
1063 /* Expand all functions that must be output.
1064
1065 Attempt to topologically sort the nodes so function is output when
1066 all called functions are already assembled to allow data to be
1067 propagated across the callgraph. Use a stack to get smaller distance
1068 between a function and its callees (later we may choose to use a more
1069 sophisticated algorithm for function reordering; we will likely want
1070 to use subsections to make the output functions appear in top-down
1071 order). */
1072
1073 static void
1074 cgraph_expand_all_functions (void)
1075 {
1076 struct cgraph_node *node;
1077 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1078 int order_pos, new_order_pos = 0;
1079 int i;
1080
1081 order_pos = cgraph_postorder (order);
1082 gcc_assert (order_pos == cgraph_n_nodes);
1083
1084 /* Garbage collector may remove inline clones we eliminate during
1085 optimization. So we must be sure to not reference them. */
1086 for (i = 0; i < order_pos; i++)
1087 if (order[i]->output)
1088 order[new_order_pos++] = order[i];
1089
1090 for (i = new_order_pos - 1; i >= 0; i--)
1091 {
1092 node = order[i];
1093 if (node->output)
1094 {
1095 gcc_assert (node->reachable);
1096 node->output = 0;
1097 cgraph_expand_function (node);
1098 }
1099 }
1100 cgraph_process_new_functions ();
1101
1102 free (order);
1103
1104 }
1105
/* This is used to sort the node types by the cgraph order number.  One
   slot per toplevel entity; the slot index is the entity's order field.  */

struct cgraph_order_sort
{
  /* Kind of entity occupying the slot; ORDER_UNDEFINED marks an empty one.  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1118
1119 /* Output all functions, variables, and asm statements in the order
1120 according to their order fields, which is the order in which they
1121 appeared in the file. This implements -fno-toplevel-reorder. In
1122 this mode we may output functions and variables which don't really
1123 need to be output. */
1124
1125 static void
1126 cgraph_output_in_order (void)
1127 {
1128 int max;
1129 size_t size;
1130 struct cgraph_order_sort *nodes;
1131 int i;
1132 struct cgraph_node *pf;
1133 struct varpool_node *pv;
1134 struct cgraph_asm_node *pa;
1135
1136 max = cgraph_order;
1137 size = max * sizeof (struct cgraph_order_sort);
1138 nodes = (struct cgraph_order_sort *) alloca (size);
1139 memset (nodes, 0, size);
1140
1141 varpool_analyze_pending_decls ();
1142
1143 for (pf = cgraph_nodes; pf; pf = pf->next)
1144 {
1145 if (pf->output)
1146 {
1147 i = pf->order;
1148 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1149 nodes[i].kind = ORDER_FUNCTION;
1150 nodes[i].u.f = pf;
1151 }
1152 }
1153
1154 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1155 {
1156 i = pv->order;
1157 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1158 nodes[i].kind = ORDER_VAR;
1159 nodes[i].u.v = pv;
1160 }
1161
1162 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1163 {
1164 i = pa->order;
1165 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1166 nodes[i].kind = ORDER_ASM;
1167 nodes[i].u.a = pa;
1168 }
1169
1170 /* In toplevel reorder mode we output all statics; mark them as needed. */
1171 for (i = 0; i < max; ++i)
1172 {
1173 if (nodes[i].kind == ORDER_VAR)
1174 {
1175 varpool_mark_needed_node (nodes[i].u.v);
1176 }
1177 }
1178 varpool_empty_needed_queue ();
1179
1180 for (i = 0; i < max; ++i)
1181 {
1182 switch (nodes[i].kind)
1183 {
1184 case ORDER_FUNCTION:
1185 nodes[i].u.f->output = 0;
1186 cgraph_expand_function (nodes[i].u.f);
1187 break;
1188
1189 case ORDER_VAR:
1190 varpool_assemble_decl (nodes[i].u.v);
1191 break;
1192
1193 case ORDER_ASM:
1194 assemble_asm (nodes[i].u.a->asm_str);
1195 break;
1196
1197 case ORDER_UNDEFINED:
1198 break;
1199
1200 default:
1201 gcc_unreachable ();
1202 }
1203 }
1204
1205 cgraph_asm_nodes = NULL;
1206 }
1207
1208 /* Return true when function body of DECL still needs to be kept around
1209 for later re-use. */
1210 bool
1211 cgraph_preserve_function_body_p (tree decl)
1212 {
1213 struct cgraph_node *node;
1214
1215 gcc_assert (cgraph_global_info_ready);
1216 /* Look if there is any clone around. */
1217 for (node = cgraph_node (decl); node; node = node->next_clone)
1218 if (node->global.inlined_to)
1219 return true;
1220 return false;
1221 }
1222
/* Execute the chain of interprocedural (IPA) passes.  These operate on
   the whole compilation unit, so run them with no current function
   context set.  */

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  /* Give the passes a default bitmap obstack and release it wholesale
     once the whole list has run.  */
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1233
/* Perform simple optimizations based on callgraph.  This is the main
   driver of whole-unit compilation: it finalizes analysis, runs the IPA
   passes, then expands and assembles every function that must be
   output.  */

void
cgraph_optimize (void)
{
  /* Nothing sensible can be done after errors were reported.  */
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();
  cgraph_analyze_functions ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_mark_functions_to_output ();

  /* Expansion phase: either in source order (-fno-toplevel-reorder) or
     in topological order with unreferenced statics removed.  */
  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  varpool_output_debug_info ();
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  /* Holds "<which>_<priority>_<counter>"; 16 bytes is ample for the
     "%c_%.5d_%d" format below.  */
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a "void f (void)" FUNCTION_DECL for the generated function.  */
  decl = build_decl (FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target ctor/dtor support, collect2 must see the symbol,
     hence it becomes public.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Mark the decl as a constructor or destructor and record its
     priority with the matching priority map.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* Hand the new function to the callgraph and make sure it is output.  */
  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}
1400
/* Initialize the callgraph machinery: open the callgraph dump file
   (NULL when -fdump-ipa-cgraph was not given).  */
void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1406
1407 /* The edges representing the callers of the NEW_VERSION node were
1408 fixed by cgraph_function_versioning (), now the call_expr in their
1409 respective tree code should be updated to call the NEW_VERSION. */
1410
1411 static void
1412 update_call_expr (struct cgraph_node *new_version)
1413 {
1414 struct cgraph_edge *e;
1415
1416 gcc_assert (new_version);
1417
1418 /* Update the call expr on the edges to call the new version. */
1419 for (e = new_version->callers; e; e = e->next_caller)
1420 {
1421 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1422 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1423 /* Update EH information too, just in case. */
1424 if (!stmt_could_throw_p (e->call_stmt)
1425 && lookup_stmt_eh_region_fn (inner_function, e->call_stmt))
1426 remove_stmt_from_eh_region_fn (inner_function, e->call_stmt);
1427 }
1428 }
1429
1430
1431 /* Create a new cgraph node which is the new version of
1432 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1433 edges which should be redirected to point to
1434 NEW_VERSION. ALL the callees edges of OLD_VERSION
1435 are cloned to the new version node. Return the new
1436 version node. */
1437
1438 static struct cgraph_node *
1439 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1440 tree new_decl,
1441 VEC(cgraph_edge_p,heap) *redirect_callers)
1442 {
1443 struct cgraph_node *new_version;
1444 struct cgraph_edge *e, *new_e;
1445 struct cgraph_edge *next_callee;
1446 unsigned i;
1447
1448 gcc_assert (old_version);
1449
1450 new_version = cgraph_node (new_decl);
1451
1452 new_version->analyzed = true;
1453 new_version->local = old_version->local;
1454 new_version->global = old_version->global;
1455 new_version->rtl = new_version->rtl;
1456 new_version->reachable = true;
1457 new_version->count = old_version->count;
1458
1459 /* Clone the old node callees. Recursive calls are
1460 also cloned. */
1461 for (e = old_version->callees;e; e=e->next_callee)
1462 {
1463 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
1464 e->loop_nest, true);
1465 new_e->count = e->count;
1466 }
1467 /* Fix recursive calls.
1468 If OLD_VERSION has a recursive call after the
1469 previous edge cloning, the new version will have an edge
1470 pointing to the old version, which is wrong;
1471 Redirect it to point to the new version. */
1472 for (e = new_version->callees ; e; e = next_callee)
1473 {
1474 next_callee = e->next_callee;
1475 if (e->callee == old_version)
1476 cgraph_redirect_edge_callee (e, new_version);
1477
1478 if (!next_callee)
1479 break;
1480 }
1481 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1482 {
1483 /* Redirect calls to the old version node to point to its new
1484 version. */
1485 cgraph_redirect_edge_callee (e, new_version);
1486 }
1487
1488 return new_version;
1489 }
1490
1491 /* Perform function versioning.
1492 Function versioning includes copying of the tree and
1493 a callgraph update (creating a new cgraph node and updating
1494 its callees and callers).
1495
1496 REDIRECT_CALLERS varray includes the edges to be redirected
1497 to the new version.
1498
1499 TREE_MAP is a mapping of tree nodes we want to replace with
1500 new ones (according to results of prior analysis).
1501 OLD_VERSION_NODE is the node that is versioned.
1502 It returns the new version's cgraph node.
1503 ARGS_TO_SKIP lists arguments to be omitted from functions
1504 */
1505
1506 struct cgraph_node *
1507 cgraph_function_versioning (struct cgraph_node *old_version_node,
1508 VEC(cgraph_edge_p,heap) *redirect_callers,
1509 varray_type tree_map,
1510 bitmap args_to_skip)
1511 {
1512 tree old_decl = old_version_node->decl;
1513 struct cgraph_node *new_version_node = NULL;
1514 tree new_decl;
1515
1516 if (!tree_versionable_function_p (old_decl))
1517 return NULL;
1518
1519 /* Make a new FUNCTION_DECL tree node for the
1520 new version. */
1521 if (!args_to_skip)
1522 new_decl = copy_node (old_decl);
1523 else
1524 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1525
1526 /* Create the new version's call-graph node.
1527 and update the edges of the new node. */
1528 new_version_node =
1529 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1530 redirect_callers);
1531
1532 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1533 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
1534
1535 /* Update the new version's properties.
1536 Make The new version visible only within this translation unit. Make sure
1537 that is not weak also.
1538 ??? We cannot use COMDAT linkage because there is no
1539 ABI support for this. */
1540 DECL_EXTERNAL (new_version_node->decl) = 0;
1541 DECL_ONE_ONLY (new_version_node->decl) = 0;
1542 TREE_PUBLIC (new_version_node->decl) = 0;
1543 DECL_COMDAT (new_version_node->decl) = 0;
1544 DECL_WEAK (new_version_node->decl) = 0;
1545 new_version_node->local.externally_visible = 0;
1546 new_version_node->local.local = 1;
1547 new_version_node->lowered = true;
1548
1549 /* Update the call_expr on the edges to call the new version node. */
1550 update_call_expr (new_version_node);
1551
1552 cgraph_call_function_insertion_hooks (new_version_node);
1553 return new_version_node;
1554 }
1555
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  Detaches the first clone from NODE's
   clone list, gives it its own decl and body copy, and makes the remaining
   clones share that new decl.  Returns the detached first clone.
   NOTE(review): assumes NODE has at least one clone (node->next_clone is
   dereferenced unconditionally) — callers must guarantee this.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  /* NODE must be the master node for its decl.  */
  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->next_clone;

  /* Detach FIRST_CLONE from the clone list and register it in the decl
     hashtable under its own (copied) decl.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  /* The saved body is local to this unit and must not be emitted under
     the original linkage.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* All remaining clones now refer to the saved copy's decl.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
1590
1591 #include "gt-cgraphunit.h"