1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
28 The front-end is supposed to use following functionality:
30 - cgraph_finalize_function
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
38 - varpool_finalize_variable
40 This function has the same behavior as the above but is used for static
43 - cgraph_finalize_compilation_unit
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
48 In the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by them as needed.
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
111 #include "coretypes.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
125 #include "diagnostic.h"
130 #include "function.h"
131 #include "ipa-prop.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "tree-dump.h"
137 #include "coverage.h"
140 static void cgraph_expand_all_functions (void);
141 static void cgraph_mark_functions_to_output (void);
142 static void cgraph_expand_function (struct cgraph_node
*);
143 static void cgraph_output_pending_asms (void);
144 static void cgraph_analyze_function (struct cgraph_node
*);
146 static FILE *cgraph_dump_file
;
148 /* A vector of FUNCTION_DECLs declared as static constructors. */
149 static GTY (()) VEC(tree
, gc
) *static_ctors
;
150 /* A vector of FUNCTION_DECLs declared as static destructors. */
151 static GTY (()) VEC(tree
, gc
) *static_dtors
;
153 /* Used for vtable lookup in thunk adjusting. */
154 static GTY (()) tree vtable_entry_type
;
156 /* When target does not have ctors and dtors, we call all constructor
157 and destructor by special initialization/destruction function
158 recognized by collect2.
160 When we are going to build this function, collect all constructors and
161 destructors and turn them into normal functions. */
164 record_cdtor_fn (tree fndecl
)
166 struct cgraph_node
*node
;
/* Nothing to record when the target emits .ctors/.dtors natively, or when
   FNDECL is neither a static constructor nor a static destructor.  */
167 if (targetm
.have_ctors_dtors
168 || (!DECL_STATIC_CONSTRUCTOR (fndecl
)
169 && !DECL_STATIC_DESTRUCTOR (fndecl
)))
/* Queue FNDECL in the constructor vector and clear the flag so it is
   emitted as an ordinary function (collect2 will call it via the
   generated initialization function).  */
172 if (DECL_STATIC_CONSTRUCTOR (fndecl
))
174 VEC_safe_push (tree
, gc
, static_ctors
, fndecl
);
175 DECL_STATIC_CONSTRUCTOR (fndecl
) = 0;
/* Likewise for destructors.  */
177 if (DECL_STATIC_DESTRUCTOR (fndecl
))
179 VEC_safe_push (tree
, gc
, static_dtors
, fndecl
);
180 DECL_STATIC_DESTRUCTOR (fndecl
) = 0;
/* The cdtor body must be kept: exempt it from inline limits and mark its
   node reachable so it is not reclaimed before the wrapper is built.  */
182 node
= cgraph_node (fndecl
);
183 node
->local
.disregard_inline_limits
= 1;
184 cgraph_mark_reachable_node (node
);
187 /* Define global constructors/destructor functions for the CDTORS, of
188 which they are LEN. The CDTORS are sorted by initialization
189 priority. If CTOR_P is true, these are constructors; otherwise,
190 they are destructors. */
193 build_cdtor (bool ctor_p
, tree
*cdtors
, size_t len
)
202 priority_type priority
;
206 /* Find the next batch of constructors/destructors with the same
207 initialization priority. */
212 p
= ctor_p
? DECL_INIT_PRIORITY (fn
) : DECL_FINI_PRIORITY (fn
);
215 else if (p
!= priority
)
217 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION
,
223 gcc_assert (body
!= NULL_TREE
);
224 /* Generate a function to call all the function of like
226 cgraph_build_static_cdtor (ctor_p
? 'I' : 'D', body
, priority
);
230 /* Comparison function for qsort. P1 and P2 are actually of type
231 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
232 used to determine the sort order. */
235 compare_ctor (const void *p1
, const void *p2
)
/* P1 and P2 point into an array of FUNCTION_DECL trees.  */
242 f1
= *(const tree
*)p1
;
243 f2
= *(const tree
*)p2
;
244 priority1
= DECL_INIT_PRIORITY (f1
);
245 priority2
= DECL_INIT_PRIORITY (f2
);
/* Order by ascending initialization priority.  */
247 if (priority1
< priority2
)
249 else if (priority1
> priority2
)
252 /* Ensure a stable sort.  Elements come from a single array, so the
   pointer difference is a well-defined tie-breaker.  */
253 return (const tree
*)p1
- (const tree
*)p2
;
256 /* Comparison function for qsort. P1 and P2 are actually of type
257 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
258 used to determine the sort order. */
261 compare_dtor (const void *p1
, const void *p2
)
/* P1 and P2 point into an array of FUNCTION_DECL trees.  */
268 f1
= *(const tree
*)p1
;
269 f2
= *(const tree
*)p2
;
270 priority1
= DECL_FINI_PRIORITY (f1
);
271 priority2
= DECL_FINI_PRIORITY (f2
);
/* Order by ascending finalization priority.  */
273 if (priority1
< priority2
)
275 else if (priority1
> priority2
)
278 /* Ensure a stable sort.  Elements come from a single array, so the
   pointer difference is a well-defined tie-breaker.  */
279 return (const tree
*)p1
- (const tree
*)p2
;
282 /* Generate functions to call static constructors and destructors
283 for targets that do not support .ctors/.dtors sections. These
284 functions have magic names which are detected by collect2. */
287 cgraph_build_cdtor_fns (void)
/* Emit collected static constructors, if any: sort them by
   DECL_INIT_PRIORITY, wrap them in collect2-visible functions, and
   empty the vector.  */
289 if (!VEC_empty (tree
, static_ctors
))
/* record_cdtor_fn only fills these vectors when the target lacks
   native ctor/dtor support.  */
291 gcc_assert (!targetm
.have_ctors_dtors
);
292 qsort (VEC_address (tree
, static_ctors
),
293 VEC_length (tree
, static_ctors
),
296 build_cdtor (/*ctor_p=*/true,
297 VEC_address (tree
, static_ctors
),
298 VEC_length (tree
, static_ctors
));
299 VEC_truncate (tree
, static_ctors
, 0);
/* Likewise for static destructors, sorted by DECL_FINI_PRIORITY.  */
302 if (!VEC_empty (tree
, static_dtors
))
304 gcc_assert (!targetm
.have_ctors_dtors
);
305 qsort (VEC_address (tree
, static_dtors
),
306 VEC_length (tree
, static_dtors
),
309 build_cdtor (/*ctor_p=*/false,
310 VEC_address (tree
, static_dtors
),
311 VEC_length (tree
, static_dtors
));
312 VEC_truncate (tree
, static_dtors
, 0);
316 /* Determine if function DECL is needed. That is, visible to something
317 either outside this translation unit, something magic in the system
321 cgraph_decide_is_function_needed (struct cgraph_node
*node
, tree decl
)
323 /* If the user told us it is used, then it must be so. */
324 if (node
->local
.externally_visible
)
327 /* ??? If the assembler name is set by hand, it is possible to assemble
328 the name later after finalizing the function and the fact is noticed
329 in assemble_name then. This is arguably a bug. */
330 if (DECL_ASSEMBLER_NAME_SET_P (decl
)
331 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl
)))
334 /* With -fkeep-inline-functions we are keeping all inline functions except
335 for extern inline ones. */
336 if (flag_keep_inline_functions
337 && DECL_DECLARED_INLINE_P (decl
)
338 && !DECL_EXTERNAL (decl
)
339 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl
)))
342 /* If we decided it was needed before, but at the time we didn't have
343 the body of the function available, then it's still needed. We have
344 to go back and re-check its dependencies now. */
348 /* Externally visible functions must be output. The exception is
349 COMDAT functions that must be output only when they are needed.
351 When not optimizing, also output the static functions. (see
352 PR24561), but don't do so for always_inline functions, functions
353 declared inline and nested functions. These were optimized out
354 in the original implementation and it is unclear whether we want
355 to change the behavior here. */
356 if (((TREE_PUBLIC (decl
)
357 || (!optimize
&& !node
->local
.disregard_inline_limits
358 && !DECL_DECLARED_INLINE_P (decl
)
360 && !flag_whole_program
363 && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
))
366 /* Constructors and destructors are reachable from the runtime by
368 if (DECL_STATIC_CONSTRUCTOR (decl
) || DECL_STATIC_DESTRUCTOR (decl
))
374 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
375 functions into callgraph in a way so they look like ordinary reachable
376 functions inserted into callgraph already at construction time. */
379 cgraph_process_new_functions (void)
383 struct cgraph_node
*node
;
385 /* Note that this queue may grow as its being processed, as the new
386 functions may generate new ones. */
387 while (cgraph_new_nodes
)
389 node
= cgraph_new_nodes
;
391 cgraph_new_nodes
= cgraph_new_nodes
->next_needed
;
392 switch (cgraph_state
)
394 case CGRAPH_STATE_CONSTRUCTION
:
395 /* At construction time we just need to finalize function and move
396 it into reachable functions list. */
398 node
->next_needed
= NULL
;
399 cgraph_finalize_function (fndecl
, false);
400 cgraph_mark_reachable_node (node
);
404 case CGRAPH_STATE_IPA
:
405 case CGRAPH_STATE_IPA_SSA
:
406 /* When IPA optimization already started, do all essential
407 transformations that has been already performed on the whole
408 cgraph but not on this function. */
410 gimple_register_cfg_hooks ();
412 cgraph_analyze_function (node
);
413 push_cfun (DECL_STRUCT_FUNCTION (fndecl
));
414 current_function_decl
= fndecl
;
415 compute_inline_parameters (node
);
416 if ((cgraph_state
== CGRAPH_STATE_IPA_SSA
417 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl
)))
418 /* When not optimizing, be sure we run early local passes anyway
421 execute_pass_list (pass_early_local_passes
.pass
.sub
);
422 free_dominance_info (CDI_POST_DOMINATORS
);
423 free_dominance_info (CDI_DOMINATORS
);
425 current_function_decl
= NULL
;
428 case CGRAPH_STATE_EXPANSION
:
429 /* Functions created during expansion shall be compiled
432 cgraph_expand_function (node
);
439 cgraph_call_function_insertion_hooks (node
);
444 /* As an GCC extension we allow redefinition of the function. The
445 semantics when both copies of bodies differ is not well defined.
446 We replace the old body with new body so in unit at a time mode
447 we always use new body, while in normal mode we may end up with
448 old body inlined into some functions and new body expanded and
451 ??? It may make more sense to use one body for inlining and other
452 body for expanding the function but this is difficult to do. */
455 cgraph_reset_node (struct cgraph_node
*node
)
457 /* If node->process is set, then we have already begun whole-unit analysis.
458 This is *not* testing for whether we've already emitted the function.
459 That case can be sort-of legitimately seen with real function redefinition
460 errors. I would argue that the front end should never present us with
461 such a case, but don't enforce that for now. */
462 gcc_assert (!node
->process
);
464 /* Reset our data structures so we can analyze the function again. */
465 memset (&node
->local
, 0, sizeof (node
->local
));
466 memset (&node
->global
, 0, sizeof (node
->global
));
467 memset (&node
->rtl
, 0, sizeof (node
->rtl
));
/* The node must be re-analyzed and re-finalized; remember that an
   extern inline body was replaced by a redefinition.  */
468 node
->analyzed
= false;
469 node
->local
.redefined_extern_inline
= true;
470 node
->local
.finalized
= false;
/* Outgoing call edges refer to the discarded body; they will be rebuilt
   when the new body is analyzed.  */
472 cgraph_node_remove_callees (node
);
474 /* We may need to re-queue the node for assembling in case
475 we already processed it and ignored it as not needed or got
476 a re-declaration in IMA mode. */
479 struct cgraph_node
*n
;
481 for (n
= cgraph_nodes_queue
; n
; n
= n
->next_needed
)
/* Lower NODE's function body to low GIMPLE form.  */
490 cgraph_lower_function (struct cgraph_node
*node
)
/* Split out nested functions first; afterwards NODE itself must not
   carry nested children any more.  */
496 lower_nested_functions (node
->decl
);
497 gcc_assert (!node
->nested
);
/* Run the GIMPLE lowering passes and record that NODE is lowered.  */
499 tree_lowering_passes (node
->decl
);
500 node
->lowered
= true;
503 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
504 logic in effect. If NESTED is true, then our caller cannot stand to have
505 the garbage collector run at the moment. We would need to either create
506 a new GC context, or just not compile right now. */
509 cgraph_finalize_function (tree decl
, bool nested
)
511 struct cgraph_node
*node
= cgraph_node (decl
)
;
/* A second definition of an already-finalized function: drop the old
   body and analysis results (GCC extern inline redefinition; see the
   comment above cgraph_reset_node).  */
513 if (node
->local
.finalized
)
514 cgraph_reset_node (node
);
/* Assign the next unique pid to this node.  */
516 node
->pid
= cgraph_max_pid
++;
517 notice_global_symbol (decl
);
518 node
->local
.finalized
= true;
/* A body that already comes with a CFG counts as lowered.  */
519 node
->lowered
= DECL_STRUCT_FUNCTION (decl
)->cfg
!= NULL
;
520 node
->finalized_by_frontend
= true;
/* Collect target-less static constructors/destructors for collect2.  */
521 record_cdtor_fn (node
->decl
);
523 if (cgraph_decide_is_function_needed (node
, decl
))
524 cgraph_mark_needed_node (node
);
526 /* Since we reclaim unreachable nodes at the end of every language
527 level unit, we need to be conservative about possible entry points
529 if ((TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
) && !DECL_EXTERNAL (decl
)))
530 cgraph_mark_reachable_node (node
);
532 /* If we've not yet emitted decl, tell the debug info about it. */
533 if (!TREE_ASM_WRITTEN (decl
))
534 (*debug_hooks
->deferred_inline_function
) (decl
);
536 /* Possibly warn about unused parameters. */
537 if (warn_unused_parameter
)
538 do_warn_unused_parameter (decl
);
544 /* C99 extern inline keywords allow changing of declaration after function
545 has been finalized. We need to re-decide if we want to mark the function as
549 cgraph_mark_if_needed (tree decl
)
551 struct cgraph_node
*node
= cgraph_node (decl
)
;
/* Only finalized functions can be marked; re-run the neededness check
   against the possibly-updated declaration.  */
552 if (node
->local
.finalized
&& cgraph_decide_is_function_needed (node
, decl
))
553 cgraph_mark_needed_node (node
);
556 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
558 clone_of_p (struct cgraph_node
*node
, struct cgraph_node
*node2
)
/* Walk NODE2's clone_of chain upward; NODE2 is NODE or a (transitive)
   clone of it iff NODE is reached before the chain ends.  */
560 while (node
!= node2
&& node2
)
561 node2
= node2
->clone_of
;
562 return node2
!= NULL
;
565 /* Verify cgraph nodes of given cgraph node. */
567 verify_cgraph_node (struct cgraph_node
*node
)
569 struct cgraph_edge
*e
;
570 struct function
*this_cfun
= DECL_STRUCT_FUNCTION (node
->decl
);
571 struct function
*saved_cfun
= cfun
;
572 basic_block this_block
;
573 gimple_stmt_iterator gsi
;
574 bool error_found
= false;
576 if (errorcount
|| sorrycount
)
579 timevar_push (TV_CGRAPH_VERIFY
);
580 /* debug_generic_stmt needs correct cfun */
581 set_cfun (this_cfun
);
582 for (e
= node
->callees
; e
; e
= e
->next_callee
)
585 error ("aux field set for edge %s->%s",
586 identifier_to_locale (cgraph_node_name (e
->caller
)),
587 identifier_to_locale (cgraph_node_name (e
->callee
)));
592 error ("Execution count is negative");
595 if (node
->global
.inlined_to
&& node
->local
.externally_visible
)
597 error ("Externally visible inline clone");
600 if (node
->global
.inlined_to
&& node
->address_taken
)
602 error ("Inline clone with address taken");
605 if (node
->global
.inlined_to
&& node
->needed
)
607 error ("Inline clone is needed");
610 for (e
= node
->callers
; e
; e
= e
->next_caller
)
614 error ("caller edge count is negative");
617 if (e
->frequency
< 0)
619 error ("caller edge frequency is negative");
622 if (e
->frequency
> CGRAPH_FREQ_MAX
)
624 error ("caller edge frequency is too large");
627 if (gimple_has_body_p (e
->caller
->decl
)
628 && !e
->caller
->global
.inlined_to
630 != compute_call_stmt_bb_frequency (e
->caller
->decl
,
631 gimple_bb (e
->call_stmt
))))
633 error ("caller edge frequency %i does not match BB freqency %i",
635 compute_call_stmt_bb_frequency (e
->caller
->decl
,
636 gimple_bb (e
->call_stmt
)));
639 if (!e
->inline_failed
)
641 if (node
->global
.inlined_to
642 != (e
->caller
->global
.inlined_to
643 ? e
->caller
->global
.inlined_to
: e
->caller
))
645 error ("inlined_to pointer is wrong");
648 if (node
->callers
->next_caller
)
650 error ("multiple inline callers");
655 if (node
->global
.inlined_to
)
657 error ("inlined_to pointer set for noninline callers");
661 if (!node
->callers
&& node
->global
.inlined_to
)
663 error ("inlined_to pointer is set but no predecessors found");
666 if (node
->global
.inlined_to
== node
)
668 error ("inlined_to pointer refers to itself");
672 if (!cgraph_node (node
->decl
))
674 error ("node not found in cgraph_hash");
680 struct cgraph_node
*n
;
681 for (n
= node
->clone_of
->clones
; n
; n
= n
->next_sibling_clone
)
686 error ("node has wrong clone_of");
692 struct cgraph_node
*n
;
693 for (n
= node
->clones
; n
; n
= n
->next_sibling_clone
)
694 if (n
->clone_of
!= node
)
698 error ("node has wrong clone list");
702 if ((node
->prev_sibling_clone
|| node
->next_sibling_clone
) && !node
->clone_of
)
704 error ("node is in clone list but it is not clone");
707 if (!node
->prev_sibling_clone
&& node
->clone_of
&& node
->clone_of
->clones
!= node
)
709 error ("node has wrong prev_clone pointer");
712 if (node
->prev_sibling_clone
&& node
->prev_sibling_clone
->next_sibling_clone
!= node
)
714 error ("double linked list of clones corrupted");
717 if (node
->same_comdat_group
)
719 struct cgraph_node
*n
= node
->same_comdat_group
;
721 if (!DECL_ONE_ONLY (node
->decl
))
723 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
728 error ("node is alone in a comdat group");
733 if (!n
->same_comdat_group
)
735 error ("same_comdat_group is not a circular list");
739 n
= n
->same_comdat_group
;
744 if (node
->analyzed
&& gimple_has_body_p (node
->decl
)
745 && !TREE_ASM_WRITTEN (node
->decl
)
746 && (!DECL_EXTERNAL (node
->decl
) || node
->global
.inlined_to
)
751 /* The nodes we're interested in are never shared, so walk
752 the tree ignoring duplicates. */
753 struct pointer_set_t
*visited_nodes
= pointer_set_create ();
754 /* Reach the trees by walking over the CFG, and note the
755 enclosing basic-blocks in the call edges. */
756 FOR_EACH_BB_FN (this_block
, this_cfun
)
757 for (gsi
= gsi_start_bb (this_block
);
761 gimple stmt
= gsi_stmt (gsi
);
763 if (is_gimple_call (stmt
) && (decl
= gimple_call_fndecl (stmt
)))
765 struct cgraph_edge
*e
= cgraph_edge (node
, stmt
);
770 error ("shared call_stmt:");
771 debug_gimple_stmt (stmt
);
774 if (e
->callee
->same_body_alias
)
776 error ("edge points to same body alias:");
777 debug_tree (e
->callee
->decl
);
780 else if (!node
->global
.inlined_to
781 && !e
->callee
->global
.inlined_to
782 && !clone_of_p (cgraph_node (decl
), e
->callee
))
784 error ("edge points to wrong declaration:");
785 debug_tree (e
->callee
->decl
);
786 fprintf (stderr
," Instead of:");
794 error ("missing callgraph edge for call stmt:");
795 debug_gimple_stmt (stmt
);
800 pointer_set_destroy (visited_nodes
);
803 /* No CFG available?! */
806 for (e
= node
->callees
; e
; e
= e
->next_callee
)
808 if (!e
->aux
&& !e
->indirect_call
)
810 error ("edge %s->%s has no corresponding call_stmt",
811 identifier_to_locale (cgraph_node_name (e
->caller
)),
812 identifier_to_locale (cgraph_node_name (e
->callee
)));
813 debug_gimple_stmt (e
->call_stmt
);
821 dump_cgraph_node (stderr
, node
);
822 internal_error ("verify_cgraph_node failed");
824 set_cfun (saved_cfun
);
825 timevar_pop (TV_CGRAPH_VERIFY
);
828 /* Verify whole cgraph structure. */
832 struct cgraph_node
*node
;
834 if (sorrycount
|| errorcount
)
837 for (node
= cgraph_nodes
; node
; node
= node
->next
)
838 verify_cgraph_node (node
);
841 /* Output all asm statements we have stored up to be output. */
844 cgraph_output_pending_asms (void)
846 struct cgraph_asm_node
*can
;
/* Do not emit anything once errors have been reported.  */
848 if (errorcount
|| sorrycount
)
/* Assemble each saved toplevel asm statement in order, then clear
   the queue.  */
851 for (can
= cgraph_asm_nodes
; can
; can
= can
->next
)
852 assemble_asm (can
->asm_str
);
853 cgraph_asm_nodes
= NULL
;
856 /* Analyze the function scheduled to be output. */
858 cgraph_analyze_function (struct cgraph_node
*node
)
860 tree save
= current_function_decl
;
861 tree decl
= node
->decl
;
/* Enter the function's context so the passes below see the right cfun.  */
863 current_function_decl
= decl
;
864 push_cfun (DECL_STRUCT_FUNCTION (decl
));
/* The assembler name must exist before the body is processed.  */
866 assign_assembler_name_if_neeeded (node
->decl
);
868 /* Make sure to gimplify bodies only once. During analyzing a
869 function we lower it, which will require gimplified nested
870 functions, so we can end up here with an already gimplified
872 if (!gimple_body (decl
))
873 gimplify_function_tree (decl
);
874 dump_function (TDI_generic
, decl
);
876 cgraph_lower_function (node
);
877 node
->analyzed
= true;
/* Restore the previously active function.  */
880 current_function_decl
= save
;
883 /* Look for externally_visible and used attributes and mark cgraph nodes
886 We cannot mark the nodes at the point the attributes are processed (in
887 handle_*_attribute) because the copy of the declarations available at that
888 point may not be canonical. For example, in:
891 void f() __attribute__((used));
893 the declaration we see in handle_used_attribute will be the second
894 declaration -- but the front end will subsequently merge that declaration
895 with the original declaration and discard the second declaration.
897 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
900 void f() __attribute__((externally_visible));
904 So, we walk the nodes at the end of the translation unit, applying the
905 attributes at that point. */
908 process_function_and_variable_attributes (struct cgraph_node
*first
,
909 struct varpool_node
*first_var
)
911 struct cgraph_node
*node
;
912 struct varpool_node
*vnode
;
914 for (node
= cgraph_nodes
; node
!= first
; node
= node
->next
)
916 tree decl
= node
->decl
;
917 if (DECL_PRESERVE_P (decl
))
919 mark_decl_referenced (decl
);
920 if (node
->local
.finalized
)
921 cgraph_mark_needed_node (node
);
923 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
925 if (! TREE_PUBLIC (node
->decl
))
926 warning_at (DECL_SOURCE_LOCATION (node
->decl
), OPT_Wattributes
,
927 "%<externally_visible%>"
928 " attribute have effect only on public objects");
929 else if (node
->local
.finalized
)
930 cgraph_mark_needed_node (node
);
933 for (vnode
= varpool_nodes
; vnode
!= first_var
; vnode
= vnode
->next
)
935 tree decl
= vnode
->decl
;
936 if (DECL_PRESERVE_P (decl
))
938 mark_decl_referenced (decl
);
939 vnode
->force_output
= true;
940 if (vnode
->finalized
)
941 varpool_mark_needed_node (vnode
);
943 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl
)))
945 if (! TREE_PUBLIC (vnode
->decl
))
946 warning_at (DECL_SOURCE_LOCATION (vnode
->decl
), OPT_Wattributes
,
947 "%<externally_visible%>"
948 " attribute have effect only on public objects");
949 else if (vnode
->finalized
)
950 varpool_mark_needed_node (vnode
);
955 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
956 each reachable functions) and build cgraph.
957 The function can be called multiple times after inserting new nodes
958 into beginning of queue. Just the new part of queue is re-scanned then. */
961 cgraph_analyze_functions (void)
963 /* Keep track of already processed nodes when called multiple times for
964 intermodule optimization. */
965 static struct cgraph_node
*first_analyzed
;
966 struct cgraph_node
*first_processed
= first_analyzed
;
967 static struct varpool_node
*first_analyzed_var
;
968 struct cgraph_node
*node
, *next
;
970 process_function_and_variable_attributes (first_processed
,
972 first_processed
= cgraph_nodes
;
973 first_analyzed_var
= varpool_nodes
;
974 varpool_analyze_pending_decls ();
975 if (cgraph_dump_file
)
977 fprintf (cgraph_dump_file
, "Initial entry points:");
978 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= node
->next
)
980 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
981 fprintf (cgraph_dump_file
, "\n");
983 cgraph_process_new_functions ();
985 /* Propagate reachability flag and lower representation of all reachable
986 functions. In the future, lowering will introduce new functions and
987 new entry points on the way (by template instantiation and virtual
988 method table generation for instance). */
989 while (cgraph_nodes_queue
)
991 struct cgraph_edge
*edge
;
992 tree decl
= cgraph_nodes_queue
->decl
;
994 node
= cgraph_nodes_queue
;
995 cgraph_nodes_queue
= cgraph_nodes_queue
->next_needed
;
996 node
->next_needed
= NULL
;
998 /* ??? It is possible to create extern inline function and later using
999 weak alias attribute to kill its body. See
1000 gcc.c-torture/compile/20011119-1.c */
1001 if (!DECL_STRUCT_FUNCTION (decl
))
1003 cgraph_reset_node (node
);
1007 if (!node
->analyzed
)
1008 cgraph_analyze_function (node
);
1010 for (edge
= node
->callees
; edge
; edge
= edge
->next_callee
)
1011 if (!edge
->callee
->reachable
)
1012 cgraph_mark_reachable_node (edge
->callee
);
1014 if (node
->same_comdat_group
)
1016 for (next
= node
->same_comdat_group
;
1018 next
= next
->same_comdat_group
)
1019 cgraph_mark_reachable_node (next
);
1022 /* If decl is a clone of an abstract function, mark that abstract
1023 function so that we don't release its body. The DECL_INITIAL() of that
1024 abstract function declaration will be later needed to output debug info. */
1025 if (DECL_ABSTRACT_ORIGIN (decl
))
1027 struct cgraph_node
*origin_node
= cgraph_node (DECL_ABSTRACT_ORIGIN (decl
));
1028 origin_node
->abstract_and_needed
= true;
1031 /* We finalize local static variables during constructing callgraph
1032 edges. Process their attributes too. */
1033 process_function_and_variable_attributes (first_processed
,
1034 first_analyzed_var
);
1035 first_processed
= cgraph_nodes
;
1036 first_analyzed_var
= varpool_nodes
;
1037 varpool_analyze_pending_decls ();
1038 cgraph_process_new_functions ();
1041 /* Collect entry points to the unit. */
1042 if (cgraph_dump_file
)
1044 fprintf (cgraph_dump_file
, "Unit entry points:");
1045 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= node
->next
)
1047 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1048 fprintf (cgraph_dump_file
, "\n\nInitial ");
1049 dump_cgraph (cgraph_dump_file
);
1052 if (cgraph_dump_file
)
1053 fprintf (cgraph_dump_file
, "\nReclaiming functions:");
1055 for (node
= cgraph_nodes
; node
!= first_analyzed
; node
= next
)
1057 tree decl
= node
->decl
;
1060 if (node
->local
.finalized
&& !gimple_has_body_p (decl
))
1061 cgraph_reset_node (node
);
1063 if (!node
->reachable
&& gimple_has_body_p (decl
))
1065 if (cgraph_dump_file
)
1066 fprintf (cgraph_dump_file
, " %s", cgraph_node_name (node
));
1067 cgraph_remove_node (node
);
1071 node
->next_needed
= NULL
;
1072 gcc_assert (!node
->local
.finalized
|| gimple_has_body_p (decl
));
1073 gcc_assert (node
->analyzed
== node
->local
.finalized
);
1075 if (cgraph_dump_file
)
1077 fprintf (cgraph_dump_file
, "\n\nReclaimed ");
1078 dump_cgraph (cgraph_dump_file
);
1080 first_analyzed
= cgraph_nodes
;
1085 /* Analyze the whole compilation unit once it is parsed completely. */
1088 cgraph_finalize_compilation_unit (void)
1090 timevar_push (TV_CGRAPH
);
1092 /* Do not skip analyzing the functions if there were errors, we
1093 miss diagnostics for following functions otherwise. */
1095 /* Emit size functions we didn't inline. */
1096 finalize_size_functions ();
1098 /* Call functions declared with the "constructor" or "destructor"
1100 cgraph_build_cdtor_fns ();
1102 /* Mark alias targets necessary and emit diagnostics. */
1103 finish_aliases_1 ();
/* Progress note on stderr.  NOTE(review): presumably guarded by a
   verbosity flag in the full source -- the guard is not visible here.  */
1107 fprintf (stderr
, "\nAnalyzing compilation unit\n");
1111 /* Gimplify and lower all functions, compute reachability and
1112 remove unreachable nodes. */
1113 cgraph_analyze_functions ();
1115 /* Mark alias targets necessary and emit diagnostics. */
/* Called a second time: analysis above may have exposed new aliases.  */
1116 finish_aliases_1 ();
1118 /* Gimplify and lower thunks. */
1119 cgraph_analyze_functions ();
1121 /* Finally drive the pass manager. */
1124 timevar_pop (TV_CGRAPH
);
1128 /* Figure out what functions we want to assemble. */
1131 cgraph_mark_functions_to_output (void)
1133 struct cgraph_node
*node
;
1134 #ifdef ENABLE_CHECKING
1135 bool check_same_comdat_groups
= false;
1137 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1138 gcc_assert (!node
->process
);
1141 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1143 tree decl
= node
->decl
;
1144 struct cgraph_edge
*e
;
1146 gcc_assert (!node
->process
|| node
->same_comdat_group
);
1150 for (e
= node
->callers
; e
; e
= e
->next_caller
)
1151 if (e
->inline_failed
)
1154 /* We need to output all local functions that are used and not
1155 always inlined, as well as those that are reachable from
1156 outside the current compilation unit. */
1158 && !node
->global
.inlined_to
1159 && (node
->needed
|| node
->reachable_from_other_partition
1160 || (e
&& node
->reachable
))
1161 && !TREE_ASM_WRITTEN (decl
)
1162 && !DECL_EXTERNAL (decl
))
1165 if (node
->same_comdat_group
)
1167 struct cgraph_node
*next
;
1168 for (next
= node
->same_comdat_group
;
1170 next
= next
->same_comdat_group
)
1174 else if (node
->same_comdat_group
)
1176 #ifdef ENABLE_CHECKING
1177 check_same_comdat_groups
= true;
1182 /* We should've reclaimed all functions that are not needed. */
1183 #ifdef ENABLE_CHECKING
1184 if (!node
->global
.inlined_to
1185 && gimple_has_body_p (decl
)
1186 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1187 are inside partition, we can end up not removing the body since we no longer
1188 have analyzed node pointing to it. */
1189 && !node
->in_other_partition
1190 && !DECL_EXTERNAL (decl
))
1192 dump_cgraph_node (stderr
, node
);
1193 internal_error ("failed to reclaim unneeded function");
1196 gcc_assert (node
->global
.inlined_to
1197 || !gimple_has_body_p (decl
)
1198 || node
->in_other_partition
1199 || DECL_EXTERNAL (decl
));
1204 #ifdef ENABLE_CHECKING
1205 if (check_same_comdat_groups
)
1206 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1207 if (node
->same_comdat_group
&& !node
->process
)
1209 tree decl
= node
->decl
;
1210 if (!node
->global
.inlined_to
1211 && gimple_has_body_p (decl
)
1212 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1213 are inside partition, we can end up not removing the body since we no longer
1214 have analyzed node pointing to it. */
1215 && !node
->in_other_partition
1216 && !DECL_EXTERNAL (decl
))
1218 dump_cgraph_node (stderr
, node
);
1219 internal_error ("failed to reclaim unneeded function");
1225 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1226 in lowered gimple form.
1228 Set current_function_decl and cfun to newly constructed empty function body.
1229 return basic block in the function body. */
1232 init_lowered_empty_function (tree decl
)
1236 current_function_decl
= decl
;
1237 allocate_struct_function (decl
, false);
1238 gimple_register_cfg_hooks ();
1239 init_empty_tree_cfg ();
1240 init_tree_ssa (cfun
);
1241 init_ssa_operands ();
1242 cfun
->gimple_df
->in_ssa_p
= true;
1243 DECL_INITIAL (decl
) = make_node (BLOCK
);
1245 DECL_SAVED_TREE (decl
) = error_mark_node
;
1246 cfun
->curr_properties
|=
1247 (PROP_gimple_lcf
| PROP_gimple_leh
| PROP_cfg
| PROP_referenced_vars
|
1250 /* Create BB for body of the function and connect it properly. */
1251 bb
= create_basic_block (NULL
, (void *) 0, ENTRY_BLOCK_PTR
);
1252 make_edge (ENTRY_BLOCK_PTR
, bb
, 0);
1253 make_edge (bb
, EXIT_BLOCK_PTR
, 0);
1258 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1259 offset indicated by VIRTUAL_OFFSET, if that is
1260 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1261 zero for a result adjusting thunk. */
1264 thunk_adjust (gimple_stmt_iterator
* bsi
,
1265 tree ptr
, bool this_adjusting
,
1266 HOST_WIDE_INT fixed_offset
, tree virtual_offset
)
1272 && fixed_offset
!= 0)
1274 stmt
= gimple_build_assign (ptr
,
1275 fold_build2_loc (input_location
,
1277 TREE_TYPE (ptr
), ptr
,
1278 size_int (fixed_offset
)));
1279 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1282 /* If there's a virtual offset, look up that value in the vtable and
1283 adjust the pointer again. */
1291 if (!vtable_entry_type
)
1293 tree vfunc_type
= make_node (FUNCTION_TYPE
);
1294 TREE_TYPE (vfunc_type
) = integer_type_node
;
1295 TYPE_ARG_TYPES (vfunc_type
) = NULL_TREE
;
1296 layout_type (vfunc_type
);
1298 vtable_entry_type
= build_pointer_type (vfunc_type
);
1302 create_tmp_var (build_pointer_type
1303 (build_pointer_type (vtable_entry_type
)), "vptr");
1305 /* The vptr is always at offset zero in the object. */
1306 stmt
= gimple_build_assign (vtabletmp
,
1307 build1 (NOP_EXPR
, TREE_TYPE (vtabletmp
),
1309 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1310 mark_symbols_for_renaming (stmt
);
1311 find_referenced_vars_in (stmt
);
1313 /* Form the vtable address. */
1314 vtabletmp2
= create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp
)),
1316 stmt
= gimple_build_assign (vtabletmp2
,
1317 build1 (INDIRECT_REF
,
1318 TREE_TYPE (vtabletmp2
), vtabletmp
));
1319 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1320 mark_symbols_for_renaming (stmt
);
1321 find_referenced_vars_in (stmt
);
1323 /* Find the entry with the vcall offset. */
1324 stmt
= gimple_build_assign (vtabletmp2
,
1325 fold_build2_loc (input_location
,
1327 TREE_TYPE (vtabletmp2
),
1329 fold_convert (sizetype
,
1331 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1333 /* Get the offset itself. */
1334 vtabletmp3
= create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2
)),
1336 stmt
= gimple_build_assign (vtabletmp3
,
1337 build1 (INDIRECT_REF
,
1338 TREE_TYPE (vtabletmp3
),
1340 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1341 mark_symbols_for_renaming (stmt
);
1342 find_referenced_vars_in (stmt
);
1344 /* Cast to sizetype. */
1345 offsettmp
= create_tmp_var (sizetype
, "offset");
1346 stmt
= gimple_build_assign (offsettmp
, fold_convert (sizetype
, vtabletmp3
));
1347 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1348 mark_symbols_for_renaming (stmt
);
1349 find_referenced_vars_in (stmt
);
1351 /* Adjust the `this' pointer. */
1352 ptr
= fold_build2_loc (input_location
,
1353 POINTER_PLUS_EXPR
, TREE_TYPE (ptr
), ptr
,
1358 && fixed_offset
!= 0)
1359 /* Adjust the pointer by the constant. */
1363 if (TREE_CODE (ptr
) == VAR_DECL
)
1367 ptrtmp
= create_tmp_var (TREE_TYPE (ptr
), "ptr");
1368 stmt
= gimple_build_assign (ptrtmp
, ptr
);
1369 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1370 mark_symbols_for_renaming (stmt
);
1371 find_referenced_vars_in (stmt
);
1373 ptr
= fold_build2_loc (input_location
,
1374 POINTER_PLUS_EXPR
, TREE_TYPE (ptrtmp
), ptrtmp
,
1375 size_int (fixed_offset
));
1378 /* Emit the statement and gimplify the adjustment expression. */
1379 ret
= create_tmp_var (TREE_TYPE (ptr
), "adjusted_this");
1380 stmt
= gimple_build_assign (ret
, ptr
);
1381 mark_symbols_for_renaming (stmt
);
1382 find_referenced_vars_in (stmt
);
1383 gsi_insert_after (bsi
, stmt
, GSI_NEW_STMT
);
1388 /* Produce assembler for thunk NODE. */
1391 assemble_thunk (struct cgraph_node
*node
)
1393 bool this_adjusting
= node
->thunk
.this_adjusting
;
1394 HOST_WIDE_INT fixed_offset
= node
->thunk
.fixed_offset
;
1395 HOST_WIDE_INT virtual_value
= node
->thunk
.virtual_value
;
1396 tree virtual_offset
= NULL
;
1397 tree alias
= node
->thunk
.alias
;
1398 tree thunk_fndecl
= node
->decl
;
1399 tree a
= DECL_ARGUMENTS (thunk_fndecl
);
1401 current_function_decl
= thunk_fndecl
;
1404 && targetm
.asm_out
.can_output_mi_thunk (thunk_fndecl
, fixed_offset
,
1405 virtual_value
, alias
))
1410 DECL_RESULT (thunk_fndecl
)
1411 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl
),
1412 RESULT_DECL
, 0, integer_type_node
);
1413 fnname
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl
));
1415 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1417 fn_block
= make_node (BLOCK
);
1418 BLOCK_VARS (fn_block
) = a
;
1419 DECL_INITIAL (thunk_fndecl
) = fn_block
;
1420 init_function_start (thunk_fndecl
);
1422 assemble_start_function (thunk_fndecl
, fnname
);
1424 targetm
.asm_out
.output_mi_thunk (asm_out_file
, thunk_fndecl
,
1425 fixed_offset
, virtual_value
, alias
);
1427 assemble_end_function (thunk_fndecl
, fnname
);
1428 init_insn_lengths ();
1429 free_after_compilation (cfun
);
1431 TREE_ASM_WRITTEN (thunk_fndecl
) = 1;
1436 basic_block bb
, then_bb
, else_bb
, return_bb
;
1437 gimple_stmt_iterator bsi
;
1443 VEC(tree
, heap
) *vargs
;
1448 DECL_IGNORED_P (thunk_fndecl
) = 1;
1449 bitmap_obstack_initialize (NULL
);
1451 if (node
->thunk
.virtual_offset_p
)
1452 virtual_offset
= size_int (virtual_value
);
1454 /* Build the return declaration for the function. */
1455 restype
= TREE_TYPE (TREE_TYPE (thunk_fndecl
));
1456 if (DECL_RESULT (thunk_fndecl
) == NULL_TREE
)
1458 resdecl
= build_decl (input_location
, RESULT_DECL
, 0, restype
);
1459 DECL_ARTIFICIAL (resdecl
) = 1;
1460 DECL_IGNORED_P (resdecl
) = 1;
1461 DECL_RESULT (thunk_fndecl
) = resdecl
;
1464 resdecl
= DECL_RESULT (thunk_fndecl
);
1466 bb
= then_bb
= else_bb
= return_bb
= init_lowered_empty_function (thunk_fndecl
);
1468 bsi
= gsi_start_bb (bb
);
1470 /* Build call to the function being thunked. */
1471 if (!VOID_TYPE_P (restype
))
1473 if (!is_gimple_reg_type (restype
))
1476 cfun
->local_decls
= tree_cons (NULL_TREE
, restmp
, cfun
->local_decls
);
1477 BLOCK_VARS (DECL_INITIAL (current_function_decl
)) = restmp
;
1480 restmp
= create_tmp_var_raw (restype
, "retval");
1483 for (arg
= a
; arg
; arg
= TREE_CHAIN (arg
))
1485 vargs
= VEC_alloc (tree
, heap
, nargs
);
1487 VEC_quick_push (tree
, vargs
,
1492 VEC_quick_push (tree
, vargs
, a
);
1493 for (i
= 1, arg
= TREE_CHAIN (a
); i
< nargs
; i
++, arg
= TREE_CHAIN (arg
))
1494 VEC_quick_push (tree
, vargs
, arg
);
1495 call
= gimple_build_call_vec (build_fold_addr_expr_loc (0, alias
), vargs
);
1496 VEC_free (tree
, heap
, vargs
);
1497 gimple_call_set_cannot_inline (call
, true);
1498 gimple_call_set_from_thunk (call
, true);
1500 gimple_call_set_lhs (call
, restmp
);
1501 gsi_insert_after (&bsi
, call
, GSI_NEW_STMT
);
1502 mark_symbols_for_renaming (call
);
1503 find_referenced_vars_in (call
);
1506 if (restmp
&& !this_adjusting
)
1508 tree true_label
= NULL_TREE
;
1510 if (TREE_CODE (TREE_TYPE (restmp
)) == POINTER_TYPE
)
1513 /* If the return type is a pointer, we need to
1514 protect against NULL. We know there will be an
1515 adjustment, because that's why we're emitting a
1517 then_bb
= create_basic_block (NULL
, (void *) 0, bb
);
1518 return_bb
= create_basic_block (NULL
, (void *) 0, then_bb
);
1519 else_bb
= create_basic_block (NULL
, (void *) 0, else_bb
);
1520 remove_edge (single_succ_edge (bb
));
1521 true_label
= gimple_block_label (then_bb
);
1522 stmt
= gimple_build_cond (NE_EXPR
, restmp
,
1523 fold_convert (TREE_TYPE (restmp
),
1525 NULL_TREE
, NULL_TREE
);
1526 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1527 make_edge (bb
, then_bb
, EDGE_TRUE_VALUE
);
1528 make_edge (bb
, else_bb
, EDGE_FALSE_VALUE
);
1529 make_edge (return_bb
, EXIT_BLOCK_PTR
, 0);
1530 make_edge (then_bb
, return_bb
, EDGE_FALLTHRU
);
1531 make_edge (else_bb
, return_bb
, EDGE_FALLTHRU
);
1532 bsi
= gsi_last_bb (then_bb
);
1535 restmp
= thunk_adjust (&bsi
, restmp
, /*this_adjusting=*/0,
1536 fixed_offset
, virtual_offset
);
1540 bsi
= gsi_last_bb (else_bb
);
1541 stmt
= gimple_build_assign (restmp
, fold_convert (TREE_TYPE (restmp
),
1542 integer_zero_node
));
1543 gsi_insert_after (&bsi
, stmt
, GSI_NEW_STMT
);
1544 bsi
= gsi_last_bb (return_bb
);
1548 gimple_call_set_tail (call
, true);
1550 /* Build return value. */
1551 ret
= gimple_build_return (restmp
);
1552 gsi_insert_after (&bsi
, ret
, GSI_NEW_STMT
);
1554 delete_unreachable_blocks ();
1555 update_ssa (TODO_update_ssa
);
1557 cgraph_remove_same_body_alias (node
);
1558 /* Since we want to emit the thunk, we explicitly mark its name as
1560 mark_decl_referenced (thunk_fndecl
);
1561 cgraph_add_new_function (thunk_fndecl
, true);
1562 bitmap_obstack_release (NULL
);
1564 current_function_decl
= NULL
;
1567 /* Expand function specified by NODE. */
1570 cgraph_expand_function (struct cgraph_node
*node
)
1572 tree decl
= node
->decl
;
1574 /* We ought to not compile any inline clones. */
1575 gcc_assert (!node
->global
.inlined_to
);
1577 announce_function (decl
);
1580 gcc_assert (node
->lowered
);
1582 /* Generate RTL for the body of DECL. */
1583 tree_rest_of_compilation (decl
);
1585 /* Make sure that BE didn't give up on compiling. */
1586 gcc_assert (TREE_ASM_WRITTEN (decl
));
1587 current_function_decl
= NULL
;
1588 if (node
->same_body
)
1590 struct cgraph_node
*alias
, *next
;
1591 bool saved_alias
= node
->alias
;
1592 for (alias
= node
->same_body
;
1593 alias
&& alias
->next
; alias
= alias
->next
)
1595 /* Walk aliases in the order they were created; it is possible that
1596 thunks reffers to the aliases made earlier. */
1597 for (; alias
; alias
= next
)
1599 next
= alias
->previous
;
1600 if (!alias
->thunk
.thunk_p
)
1601 assemble_alias (alias
->decl
,
1602 DECL_ASSEMBLER_NAME (alias
->thunk
.alias
));
1604 assemble_thunk (alias
);
1606 node
->alias
= saved_alias
;
1608 gcc_assert (!cgraph_preserve_function_body_p (decl
));
1609 cgraph_release_function_body (node
);
1610 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1611 points to the dead function body. */
1612 cgraph_node_remove_callees (node
);
1614 cgraph_function_flags_ready
= true;
1617 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1620 cgraph_inline_p (struct cgraph_edge
*e
, cgraph_inline_failed_t
*reason
)
1622 *reason
= e
->inline_failed
;
1623 return !e
->inline_failed
;
1628 /* Expand all functions that must be output.
1630 Attempt to topologically sort the nodes so function is output when
1631 all called functions are already assembled to allow data to be
1632 propagated across the callgraph. Use a stack to get smaller distance
1633 between a function and its callees (later we may choose to use a more
1634 sophisticated algorithm for function reordering; we will likely want
1635 to use subsections to make the output functions appear in top-down
1639 cgraph_expand_all_functions (void)
1641 struct cgraph_node
*node
;
1642 struct cgraph_node
**order
= XCNEWVEC (struct cgraph_node
*, cgraph_n_nodes
);
1643 int order_pos
, new_order_pos
= 0;
1646 order_pos
= cgraph_postorder (order
);
1647 gcc_assert (order_pos
== cgraph_n_nodes
);
1649 /* Garbage collector may remove inline clones we eliminate during
1650 optimization. So we must be sure to not reference them. */
1651 for (i
= 0; i
< order_pos
; i
++)
1652 if (order
[i
]->process
)
1653 order
[new_order_pos
++] = order
[i
];
1655 for (i
= new_order_pos
- 1; i
>= 0; i
--)
1660 gcc_assert (node
->reachable
);
1662 cgraph_expand_function (node
);
1665 cgraph_process_new_functions ();
1671 /* This is used to sort the node types by the cgraph order number. */
1673 enum cgraph_order_sort_kind
1675 ORDER_UNDEFINED
= 0,
1681 struct cgraph_order_sort
1683 enum cgraph_order_sort_kind kind
;
1686 struct cgraph_node
*f
;
1687 struct varpool_node
*v
;
1688 struct cgraph_asm_node
*a
;
1692 /* Output all functions, variables, and asm statements in the order
1693 according to their order fields, which is the order in which they
1694 appeared in the file. This implements -fno-toplevel-reorder. In
1695 this mode we may output functions and variables which don't really
1696 need to be output. */
1699 cgraph_output_in_order (void)
1702 struct cgraph_order_sort
*nodes
;
1704 struct cgraph_node
*pf
;
1705 struct varpool_node
*pv
;
1706 struct cgraph_asm_node
*pa
;
1709 nodes
= XCNEWVEC (struct cgraph_order_sort
, max
);
1711 varpool_analyze_pending_decls ();
1713 for (pf
= cgraph_nodes
; pf
; pf
= pf
->next
)
1718 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1719 nodes
[i
].kind
= ORDER_FUNCTION
;
1724 for (pv
= varpool_nodes_queue
; pv
; pv
= pv
->next_needed
)
1727 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1728 nodes
[i
].kind
= ORDER_VAR
;
1732 for (pa
= cgraph_asm_nodes
; pa
; pa
= pa
->next
)
1735 gcc_assert (nodes
[i
].kind
== ORDER_UNDEFINED
);
1736 nodes
[i
].kind
= ORDER_ASM
;
1740 /* In toplevel reorder mode we output all statics; mark them as needed. */
1741 for (i
= 0; i
< max
; ++i
)
1743 if (nodes
[i
].kind
== ORDER_VAR
)
1745 varpool_mark_needed_node (nodes
[i
].u
.v
);
1748 varpool_empty_needed_queue ();
1750 for (i
= 0; i
< max
; ++i
)
1752 switch (nodes
[i
].kind
)
1754 case ORDER_FUNCTION
:
1755 nodes
[i
].u
.f
->process
= 0;
1756 cgraph_expand_function (nodes
[i
].u
.f
);
1760 varpool_assemble_decl (nodes
[i
].u
.v
);
1764 assemble_asm (nodes
[i
].u
.a
->asm_str
);
1767 case ORDER_UNDEFINED
:
1775 cgraph_asm_nodes
= NULL
;
1779 /* Return true when function body of DECL still needs to be kept around
1780 for later re-use. */
1782 cgraph_preserve_function_body_p (tree decl
)
1784 struct cgraph_node
*node
;
1786 gcc_assert (cgraph_global_info_ready
);
1787 /* Look if there is any clone around. */
1788 node
= cgraph_node (decl
);
1798 current_function_decl
= NULL
;
1799 gimple_register_cfg_hooks ();
1800 bitmap_obstack_initialize (NULL
);
1802 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START
, NULL
);
1805 execute_ipa_pass_list (all_small_ipa_passes
);
1807 /* If pass_all_early_optimizations was not scheduled, the state of
1808 the cgraph will not be properly updated. Update it now. */
1809 if (cgraph_state
< CGRAPH_STATE_IPA_SSA
)
1810 cgraph_state
= CGRAPH_STATE_IPA_SSA
;
1814 /* Generate coverage variables and constructors. */
1817 /* Process new functions added. */
1819 current_function_decl
= NULL
;
1820 cgraph_process_new_functions ();
1822 execute_ipa_summary_passes
1823 ((struct ipa_opt_pass_d
*) all_regular_ipa_passes
);
1825 execute_ipa_summary_passes ((struct ipa_opt_pass_d
*) all_lto_gen_passes
);
1828 ipa_write_summaries ();
1831 execute_ipa_pass_list (all_regular_ipa_passes
);
1832 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END
, NULL
);
1834 bitmap_obstack_release (NULL
);
1838 /* Perform simple optimizations based on callgraph. */
1841 cgraph_optimize (void)
1843 if (errorcount
|| sorrycount
)
1846 #ifdef ENABLE_CHECKING
1850 /* Frontend may output common variables after the unit has been finalized.
1851 It is safe to deal with them here as they are always zero initialized. */
1852 varpool_analyze_pending_decls ();
1854 timevar_push (TV_CGRAPHOPT
);
1855 if (pre_ipa_mem_report
)
1857 fprintf (stderr
, "Memory consumption before IPA\n");
1858 dump_memory_report (false);
1861 fprintf (stderr
, "Performing interprocedural optimizations\n");
1862 cgraph_state
= CGRAPH_STATE_IPA
;
1864 /* Don't run the IPA passes if there was any error or sorry messages. */
1865 if (errorcount
== 0 && sorrycount
== 0)
1868 /* Do nothing else if any IPA pass found errors. */
1869 if (errorcount
|| sorrycount
)
1871 timevar_pop (TV_CGRAPHOPT
);
1875 /* This pass remove bodies of extern inline functions we never inlined.
1876 Do this later so other IPA passes see what is really going on. */
1877 cgraph_remove_unreachable_nodes (false, dump_file
);
1878 cgraph_global_info_ready
= true;
1879 if (cgraph_dump_file
)
1881 fprintf (cgraph_dump_file
, "Optimized ");
1882 dump_cgraph (cgraph_dump_file
);
1883 dump_varpool (cgraph_dump_file
);
1885 if (post_ipa_mem_report
)
1887 fprintf (stderr
, "Memory consumption after IPA\n");
1888 dump_memory_report (false);
1890 timevar_pop (TV_CGRAPHOPT
);
1892 /* Output everything. */
1893 (*debug_hooks
->assembly_start
) ();
1895 fprintf (stderr
, "Assembling functions:\n");
1896 #ifdef ENABLE_CHECKING
1900 cgraph_materialize_all_clones ();
1901 cgraph_mark_functions_to_output ();
1903 cgraph_state
= CGRAPH_STATE_EXPANSION
;
1904 if (!flag_toplevel_reorder
)
1905 cgraph_output_in_order ();
1908 cgraph_output_pending_asms ();
1910 cgraph_expand_all_functions ();
1911 varpool_remove_unreferenced_decls ();
1913 varpool_assemble_pending_decls ();
1915 cgraph_process_new_functions ();
1916 cgraph_state
= CGRAPH_STATE_FINISHED
;
1918 if (cgraph_dump_file
)
1920 fprintf (cgraph_dump_file
, "\nFinal ");
1921 dump_cgraph (cgraph_dump_file
);
1923 #ifdef ENABLE_CHECKING
1925 /* Double check that all inline clones are gone and that all
1926 function bodies have been released from memory. */
1927 if (!(sorrycount
|| errorcount
))
1929 struct cgraph_node
*node
;
1930 bool error_found
= false;
1932 for (node
= cgraph_nodes
; node
; node
= node
->next
)
1934 && (node
->global
.inlined_to
1935 || gimple_has_body_p (node
->decl
)))
1938 dump_cgraph_node (stderr
, node
);
1941 internal_error ("nodes with unreleased memory found");
1947 /* Generate and emit a static constructor or destructor. WHICH must
1948 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1949 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1950 initialization priority for this constructor or destructor. */
1953 cgraph_build_static_cdtor (char which
, tree body
, int priority
)
1955 static int counter
= 0;
1957 tree decl
, name
, resdecl
;
1959 /* The priority is encoded in the constructor or destructor name.
1960 collect2 will sort the names and arrange that they are called at
1962 sprintf (which_buf
, "%c_%.5d_%d", which
, priority
, counter
++);
1963 name
= get_file_function_name (which_buf
);
1965 decl
= build_decl (input_location
, FUNCTION_DECL
, name
,
1966 build_function_type (void_type_node
, void_list_node
));
1967 current_function_decl
= decl
;
1969 resdecl
= build_decl (input_location
,
1970 RESULT_DECL
, NULL_TREE
, void_type_node
);
1971 DECL_ARTIFICIAL (resdecl
) = 1;
1972 DECL_RESULT (decl
) = resdecl
;
1973 DECL_CONTEXT (resdecl
) = decl
;
1975 allocate_struct_function (decl
, false);
1977 TREE_STATIC (decl
) = 1;
1978 TREE_USED (decl
) = 1;
1979 DECL_ARTIFICIAL (decl
) = 1;
1980 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl
) = 1;
1981 DECL_SAVED_TREE (decl
) = body
;
1982 if (!targetm
.have_ctors_dtors
)
1984 TREE_PUBLIC (decl
) = 1;
1985 DECL_PRESERVE_P (decl
) = 1;
1987 DECL_UNINLINABLE (decl
) = 1;
1989 DECL_INITIAL (decl
) = make_node (BLOCK
);
1990 TREE_USED (DECL_INITIAL (decl
)) = 1;
1992 DECL_SOURCE_LOCATION (decl
) = input_location
;
1993 cfun
->function_end_locus
= input_location
;
1998 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1999 decl_init_priority_insert (decl
, priority
);
2002 DECL_STATIC_DESTRUCTOR (decl
) = 1;
2003 decl_fini_priority_insert (decl
, priority
);
2009 gimplify_function_tree (decl
);
2011 cgraph_add_new_function (decl
, false);
2012 cgraph_mark_needed_node (cgraph_node (decl
));
2019 cgraph_dump_file
= dump_begin (TDI_cgraph
, NULL
);
2022 /* The edges representing the callers of the NEW_VERSION node were
2023 fixed by cgraph_function_versioning (), now the call_expr in their
2024 respective tree code should be updated to call the NEW_VERSION. */
2027 update_call_expr (struct cgraph_node
*new_version
)
2029 struct cgraph_edge
*e
;
2031 gcc_assert (new_version
);
2033 /* Update the call expr on the edges to call the new version. */
2034 for (e
= new_version
->callers
; e
; e
= e
->next_caller
)
2036 struct function
*inner_function
= DECL_STRUCT_FUNCTION (e
->caller
->decl
);
2037 gimple_call_set_fndecl (e
->call_stmt
, new_version
->decl
);
2038 maybe_clean_eh_stmt_fn (inner_function
, e
->call_stmt
);
2043 /* Create a new cgraph node which is the new version of
2044 OLD_VERSION node. REDIRECT_CALLERS holds the callers
2045 edges which should be redirected to point to
2046 NEW_VERSION. ALL the callees edges of OLD_VERSION
2047 are cloned to the new version node. Return the new
2050 static struct cgraph_node
*
2051 cgraph_copy_node_for_versioning (struct cgraph_node
*old_version
,
2053 VEC(cgraph_edge_p
,heap
) *redirect_callers
)
2055 struct cgraph_node
*new_version
;
2056 struct cgraph_edge
*e
;
2057 struct cgraph_edge
*next_callee
;
2060 gcc_assert (old_version
);
2062 new_version
= cgraph_node (new_decl
);
2064 new_version
->analyzed
= true;
2065 new_version
->local
= old_version
->local
;
2066 new_version
->global
= old_version
->global
;
2067 new_version
->rtl
= new_version
->rtl
;
2068 new_version
->reachable
= true;
2069 new_version
->count
= old_version
->count
;
2071 /* Clone the old node callees. Recursive calls are
2073 for (e
= old_version
->callees
;e
; e
=e
->next_callee
)
2075 cgraph_clone_edge (e
, new_version
, e
->call_stmt
,
2076 e
->lto_stmt_uid
, REG_BR_PROB_BASE
,
2078 e
->loop_nest
, true);
2080 /* Fix recursive calls.
2081 If OLD_VERSION has a recursive call after the
2082 previous edge cloning, the new version will have an edge
2083 pointing to the old version, which is wrong;
2084 Redirect it to point to the new version. */
2085 for (e
= new_version
->callees
; e
; e
= next_callee
)
2087 next_callee
= e
->next_callee
;
2088 if (e
->callee
== old_version
)
2089 cgraph_redirect_edge_callee (e
, new_version
);
2094 for (i
= 0; VEC_iterate (cgraph_edge_p
, redirect_callers
, i
, e
); i
++)
2096 /* Redirect calls to the old version node to point to its new
2098 cgraph_redirect_edge_callee (e
, new_version
);
2104 /* Perform function versioning.
2105 Function versioning includes copying of the tree and
2106 a callgraph update (creating a new cgraph node and updating
2107 its callees and callers).
2109 REDIRECT_CALLERS varray includes the edges to be redirected
2112 TREE_MAP is a mapping of tree nodes we want to replace with
2113 new ones (according to results of prior analysis).
2114 OLD_VERSION_NODE is the node that is versioned.
2115 It returns the new version's cgraph node.
2116 ARGS_TO_SKIP lists arguments to be omitted from functions
2119 struct cgraph_node
*
2120 cgraph_function_versioning (struct cgraph_node
*old_version_node
,
2121 VEC(cgraph_edge_p
,heap
) *redirect_callers
,
2122 VEC (ipa_replace_map_p
,gc
)* tree_map
,
2123 bitmap args_to_skip
)
2125 tree old_decl
= old_version_node
->decl
;
2126 struct cgraph_node
*new_version_node
= NULL
;
2129 if (!tree_versionable_function_p (old_decl
))
2132 /* Make a new FUNCTION_DECL tree node for the
2135 new_decl
= copy_node (old_decl
);
2137 new_decl
= build_function_decl_skip_args (old_decl
, args_to_skip
);
2139 /* Create the new version's call-graph node.
2140 and update the edges of the new node. */
2142 cgraph_copy_node_for_versioning (old_version_node
, new_decl
,
2145 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2146 tree_function_versioning (old_decl
, new_decl
, tree_map
, false, args_to_skip
);
2148 /* Update the new version's properties.
2149 Make The new version visible only within this translation unit. Make sure
2150 that is not weak also.
2151 ??? We cannot use COMDAT linkage because there is no
2152 ABI support for this. */
2153 cgraph_make_decl_local (new_version_node
->decl
);
2154 DECL_VIRTUAL_P (new_version_node
->decl
) = 0;
2155 new_version_node
->local
.externally_visible
= 0;
2156 new_version_node
->local
.local
= 1;
2157 new_version_node
->lowered
= true;
2159 /* Update the call_expr on the edges to call the new version node. */
2160 update_call_expr (new_version_node
);
2162 cgraph_call_function_insertion_hooks (new_version_node
);
2163 return new_version_node
;
2166 /* Produce separate function body for inline clones so the offline copy can be
2167 modified without affecting them. */
2168 struct cgraph_node
*
2169 save_inline_function_body (struct cgraph_node
*node
)
2171 struct cgraph_node
*first_clone
, *n
;
2173 gcc_assert (node
== cgraph_node (node
->decl
));
2175 cgraph_lower_function (node
);
2177 first_clone
= node
->clones
;
2179 first_clone
->decl
= copy_node (node
->decl
);
2180 cgraph_insert_node_to_hashtable (first_clone
);
2181 gcc_assert (first_clone
== cgraph_node (first_clone
->decl
));
2182 if (first_clone
->next_sibling_clone
)
2184 for (n
= first_clone
->next_sibling_clone
; n
->next_sibling_clone
; n
= n
->next_sibling_clone
)
2185 n
->clone_of
= first_clone
;
2186 n
->clone_of
= first_clone
;
2187 n
->next_sibling_clone
= first_clone
->clones
;
2188 if (first_clone
->clones
)
2189 first_clone
->clones
->prev_sibling_clone
= n
;
2190 first_clone
->clones
= first_clone
->next_sibling_clone
;
2191 first_clone
->next_sibling_clone
->prev_sibling_clone
= NULL
;
2192 first_clone
->next_sibling_clone
= NULL
;
2193 gcc_assert (!first_clone
->prev_sibling_clone
);
2195 first_clone
->clone_of
= NULL
;
2196 node
->clones
= NULL
;
2198 if (first_clone
->clones
)
2199 for (n
= first_clone
->clones
; n
!= first_clone
;)
2201 gcc_assert (n
->decl
== node
->decl
);
2202 n
->decl
= first_clone
->decl
;
2205 else if (n
->next_sibling_clone
)
2206 n
= n
->next_sibling_clone
;
2209 while (n
!= first_clone
&& !n
->next_sibling_clone
)
2211 if (n
!= first_clone
)
2212 n
= n
->next_sibling_clone
;
2216 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2217 tree_function_versioning (node
->decl
, first_clone
->decl
, NULL
, true, NULL
);
2219 DECL_EXTERNAL (first_clone
->decl
) = 0;
2220 DECL_COMDAT_GROUP (first_clone
->decl
) = NULL_TREE
;
2221 TREE_PUBLIC (first_clone
->decl
) = 0;
2222 DECL_COMDAT (first_clone
->decl
) = 0;
2223 VEC_free (ipa_opt_pass
, heap
,
2224 first_clone
->ipa_transforms_to_apply
);
2225 first_clone
->ipa_transforms_to_apply
= NULL
;
2227 #ifdef ENABLE_CHECKING
2228 verify_cgraph_node (first_clone
);
2233 /* Given virtual clone, turn it into actual clone. */
2235 cgraph_materialize_clone (struct cgraph_node
*node
)
2237 bitmap_obstack_initialize (NULL
);
2238 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2239 tree_function_versioning (node
->clone_of
->decl
, node
->decl
,
2240 node
->clone
.tree_map
, true,
2241 node
->clone
.args_to_skip
);
2242 if (cgraph_dump_file
)
2244 dump_function_to_file (node
->clone_of
->decl
, cgraph_dump_file
, dump_flags
);
2245 dump_function_to_file (node
->decl
, cgraph_dump_file
, dump_flags
);
2248 /* Function is no longer clone. */
2249 if (node
->next_sibling_clone
)
2250 node
->next_sibling_clone
->prev_sibling_clone
= node
->prev_sibling_clone
;
2251 if (node
->prev_sibling_clone
)
2252 node
->prev_sibling_clone
->next_sibling_clone
= node
->next_sibling_clone
;
2254 node
->clone_of
->clones
= node
->next_sibling_clone
;
2255 node
->next_sibling_clone
= NULL
;
2256 node
->prev_sibling_clone
= NULL
;
2257 if (!node
->clone_of
->analyzed
&& !node
->clone_of
->clones
)
2258 cgraph_remove_node (node
->clone_of
);
2259 node
->clone_of
= NULL
;
2260 bitmap_obstack_release (NULL
);
2263 /* If necessary, change the function declaration in the call statement
2264 associated with E so that it corresponds to the edge callee. */
2267 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge
*e
)
2269 tree decl
= gimple_call_fndecl (e
->call_stmt
);
2271 gimple_stmt_iterator gsi
;
2273 if (!decl
|| decl
== e
->callee
->decl
2274 /* Don't update call from same body alias to the real function. */
2275 || cgraph_get_node (decl
) == cgraph_get_node (e
->callee
->decl
))
2276 return e
->call_stmt
;
2278 if (cgraph_dump_file
)
2280 fprintf (cgraph_dump_file
, "updating call of %s/%i -> %s/%i: ",
2281 cgraph_node_name (e
->caller
), e
->caller
->uid
,
2282 cgraph_node_name (e
->callee
), e
->callee
->uid
);
2283 print_gimple_stmt (cgraph_dump_file
, e
->call_stmt
, 0, dump_flags
);
2286 if (e
->callee
->clone
.combined_args_to_skip
)
2287 new_stmt
= gimple_call_copy_skip_args (e
->call_stmt
,
2288 e
->callee
->clone
.combined_args_to_skip
);
2290 new_stmt
= e
->call_stmt
;
2291 if (gimple_vdef (new_stmt
)
2292 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
2293 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
2294 gimple_call_set_fndecl (new_stmt
, e
->callee
->decl
);
2296 gsi
= gsi_for_stmt (e
->call_stmt
);
2297 gsi_replace (&gsi
, new_stmt
, true);
2298 update_stmt (new_stmt
);
2300 /* Update EH information too, just in case. */
2301 maybe_clean_or_replace_eh_stmt (e
->call_stmt
, new_stmt
);
2303 cgraph_set_call_stmt_including_clones (e
->caller
, e
->call_stmt
, new_stmt
);
2305 if (cgraph_dump_file
)
2307 fprintf (cgraph_dump_file
, " updated to:");
2308 print_gimple_stmt (cgraph_dump_file
, e
->call_stmt
, 0, dump_flags
);
2313 /* Once all functions from compilation unit are in memory, produce all clones
2314 and update all calls. We might also do this on demand if we don't want to
2315 bring all functions to memory prior compilation, but current WHOPR
2316 implementation does that and it is is bit easier to keep everything right in
2319 cgraph_materialize_all_clones (void)
2321 struct cgraph_node
*node
;
2322 bool stabilized
= false;
2324 if (cgraph_dump_file
)
2325 fprintf (cgraph_dump_file
, "Materializing clones\n");
2326 #ifdef ENABLE_CHECKING
2330 /* We can also do topological order, but number of iterations should be
2331 bounded by number of IPA passes since single IPA pass is probably not
2332 going to create clones of clones it created itself. */
2336 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2338 if (node
->clone_of
&& node
->decl
!= node
->clone_of
->decl
2339 && !gimple_has_body_p (node
->decl
))
2341 if (gimple_has_body_p (node
->clone_of
->decl
))
2343 if (cgraph_dump_file
)
2345 fprintf (cgraph_dump_file
, "clonning %s to %s\n",
2346 cgraph_node_name (node
->clone_of
),
2347 cgraph_node_name (node
));
2348 if (node
->clone
.tree_map
)
2351 fprintf (cgraph_dump_file
, " replace map: ");
2352 for (i
= 0; i
< VEC_length (ipa_replace_map_p
,
2353 node
->clone
.tree_map
);
2356 struct ipa_replace_map
*replace_info
;
2357 replace_info
= VEC_index (ipa_replace_map_p
,
2358 node
->clone
.tree_map
,
2360 print_generic_expr (cgraph_dump_file
, replace_info
->old_tree
, 0);
2361 fprintf (cgraph_dump_file
, " -> ");
2362 print_generic_expr (cgraph_dump_file
, replace_info
->new_tree
, 0);
2363 fprintf (cgraph_dump_file
, "%s%s;",
2364 replace_info
->replace_p
? "(replace)":"",
2365 replace_info
->ref_p
? "(ref)":"");
2367 fprintf (cgraph_dump_file
, "\n");
2369 if (node
->clone
.args_to_skip
)
2371 fprintf (cgraph_dump_file
, " args_to_skip: ");
2372 dump_bitmap (cgraph_dump_file
, node
->clone
.args_to_skip
);
2374 if (node
->clone
.args_to_skip
)
2376 fprintf (cgraph_dump_file
, " combined_args_to_skip:");
2377 dump_bitmap (cgraph_dump_file
, node
->clone
.combined_args_to_skip
);
2380 cgraph_materialize_clone (node
);
2387 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2388 if (!node
->analyzed
&& node
->callees
)
2389 cgraph_node_remove_callees (node
);
2390 if (cgraph_dump_file
)
2391 fprintf (cgraph_dump_file
, "Updating call sites\n");
2392 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2393 if (node
->analyzed
&& !node
->clone_of
2394 && gimple_has_body_p (node
->decl
))
2396 struct cgraph_edge
*e
;
2398 current_function_decl
= node
->decl
;
2399 push_cfun (DECL_STRUCT_FUNCTION (node
->decl
));
2400 for (e
= node
->callees
; e
; e
= e
->next_callee
)
2401 cgraph_redirect_edge_call_stmt_to_callee (e
);
2402 gcc_assert (!need_ssa_update_p (cfun
));
2404 current_function_decl
= NULL
;
2405 #ifdef ENABLE_CHECKING
2406 verify_cgraph_node (node
);
2409 if (cgraph_dump_file
)
2410 fprintf (cgraph_dump_file
, "Materialization Call site updates done.\n");
2411 /* All changes to parameters have been performed. In order not to
2412 incorrectly repeat them, we simply dispose of the bitmaps that drive the
2414 for (node
= cgraph_nodes
; node
; node
= node
->next
)
2415 node
->clone
.combined_args_to_skip
= NULL
;
2416 #ifdef ENABLE_CHECKING
2419 cgraph_remove_unreachable_nodes (false, cgraph_dump_file
);
2422 #include "gt-cgraphunit.h"