[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23 as a few basic interprocedural optimizers.
24
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of a
33 function and it is certain that neither the function body nor the declaration will change.
34
35 (There is one exception, needed for implementing GCC extern inline
36 functions.)
37
38 - varpool_finalize_variable
39
40 This function has the same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once the (source level) compilation unit is finalized
46 and it will no longer change.
47
48 In unit-at-a-time mode the call-graph construction and local function
49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
54
55 - cgraph_optimize
56
57 In unit-at-a-time compilation the intraprocedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the expression as needed.
76
77 ??? On tree-ssa, genericizing should take place here and we would avoid the
78 need for these hooks (replacing them by a genericizing hook).
79
80 - expand_function callback
81
82 This function is used to expand a function and pass it to the RTL back end.
83 The front end should not make any assumptions about when this function can be
84 called. In particular cgraph_assemble_pending_functions,
85 varpool_assemble_pending_variables, cgraph_finalize_function,
86 varpool_finalize_function, and cgraph_optimize can cause arbitrary
87 previously finalized functions to be expanded.
88
89 We implement two compilation modes.
90
91 - unit-at-a-time: In this mode the analysis of all functions is deferred
92 to cgraph_finalize_compilation_unit and their expansion to cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too
98 and variables found to be needed are output to the assembly file. Via
99 the mark_referenced call in assemble_variable, functions referenced by
100 static variables are noticed too.
101
102 The intra-procedural information is produced and its existence is
103 indicated by global_info_ready. Once this flag is set it is impossible
104 to change a function from !reachable to reachable, and thus
105 assemble_variable no longer calls mark_referenced.
106
107 Finally the call-graph is topologically sorted and all reachable functions
108 that have not been completely inlined or are not external are output.
109
110 ??? It is possible that a reference to a function or variable is optimized
111 out. We cannot deal with this nicely because the topological order is not
112 suitable for it. For tree-ssa we may consider another pass doing
113 optimization and re-discovering reachable functions.
114
115 ??? Reorganize the code so variables are output last, and only if they
116 really have been referenced by the produced code, so we catch more cases
117 where the reference has been optimized out.
118
119 - non-unit-at-a-time
120
121 All functions and variables are output as early as possible to conserve
122 memory consumption. This may or may not result in less memory being used, but
123 it is still needed for some legacy code that relies on a particular ordering
124 of things output from the compiler.
125
126 Varpool data structures are not used and variables are output directly.
127
128 Functions are output early by a call to
129 cgraph_assemble_pending_functions from cgraph_finalize_function. The
130 decision on whether a function is needed is made more conservatively, so
131 uninlinable static functions are needed too. During the call-graph
132 construction the edge destinations are not marked as reachable; marking
133 them is left entirely to assemble_variable. */
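
/* A minimal sketch (illustrative only, not lifted from any particular front
   end; FNDECL stands for a hypothetical FUNCTION_DECL the front end has
   finished parsing) of how a front end drives this module in
   unit-at-a-time mode:

	for each function FNDECL in the translation unit:
	  cgraph_finalize_function (fndecl, false);

	cgraph_finalize_compilation_unit ();
	cgraph_optimize ();

   Hidden references (for instance C++ keyed methods) would additionally be
   flagged via cgraph_mark_needed_node before cgraph_optimize runs.  */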
134
135
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static void cgraph_output_pending_asms (void);
168 static void cgraph_increase_alignment (void);
169
170 static FILE *cgraph_dump_file;
171
172 /* Determine if function DECL is needed. That is, visible to something
173 either outside this translation unit, something magic in the system
174 configury, or (if not doing unit-at-a-time) to something we haven't
175 seen yet. */
176
177 static bool
178 decide_is_function_needed (struct cgraph_node *node, tree decl)
179 {
180 tree origin;
181 if (MAIN_NAME_P (DECL_NAME (decl))
182 && TREE_PUBLIC (decl))
183 {
184 node->local.externally_visible = true;
185 return true;
186 }
187
188 /* If the user told us it is used, then it must be so. */
189 if (node->local.externally_visible)
190 return true;
191
192 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
193 return true;
194
195 /* ??? If the assembler name is set by hand, it is possible to assemble
196 the name later after finalizing the function and the fact is noticed
197 in assemble_name then. This is arguably a bug. */
198 if (DECL_ASSEMBLER_NAME_SET_P (decl)
199 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
200 return true;
201
202 /* If we decided it was needed before, but at the time we didn't have
203 the body of the function available, then it's still needed. We have
204 to go back and re-check its dependencies now. */
205 if (node->needed)
206 return true;
207
208 /* Externally visible functions must be output. The exception is
209 COMDAT functions that must be output only when they are needed.
210
211 When not optimizing, also output the static functions. (see
212 PR24561), but don't do so for always_inline functions, functions
213 declared inline and nested functions. These were optimized out
214 in the original implementation and it is unclear whether we want
215 to change the behavior here. */
216 if (((TREE_PUBLIC (decl)
217 || (!optimize && !node->local.disregard_inline_limits
218 && !DECL_DECLARED_INLINE_P (decl)
219 && !node->origin))
220 && !flag_whole_program)
221 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
222 return true;
223
224 /* Constructors and destructors are reachable from the runtime by
225 some mechanism. */
226 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
227 return true;
228
229 if (flag_unit_at_a_time)
230 return false;
231
232 /* If not doing unit at a time, then we'll only defer this function
233 if it's marked for inlining. Otherwise we want to emit it now. */
234
235 /* "extern inline" functions are never output locally. */
236 if (DECL_EXTERNAL (decl))
237 return false;
238 /* Nested functions of an extern inline function shall not be emitted unless
239 we inlined the origin. */
240 for (origin = decl_function_context (decl); origin;
241 origin = decl_function_context (origin))
242 if (DECL_EXTERNAL (origin))
243 return false;
244 /* We want to emit COMDAT functions only when absolutely necessary. */
245 if (DECL_COMDAT (decl))
246 return false;
247 if (!DECL_INLINE (decl)
248 || (!node->local.disregard_inline_limits
249 /* When declared inline, defer even the uninlinable functions.
250 This allows them to be eliminated when unused. */
251 && !DECL_DECLARED_INLINE_P (decl)
252 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
253 return true;
254
255 return false;
256 }
257
258 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
259 functions to the callgraph so that they look like ordinary reachable
260 functions inserted into the callgraph already at construction time. */
261
262 bool
263 cgraph_process_new_functions (void)
264 {
265 bool output = false;
266 tree fndecl;
267 struct cgraph_node *node;
268
269 /* Note that this queue may grow as it is being processed, as the new
270 functions may generate new ones. */
271 while (cgraph_new_nodes)
272 {
273 node = cgraph_new_nodes;
274 fndecl = node->decl;
275 cgraph_new_nodes = cgraph_new_nodes->next_needed;
276 switch (cgraph_state)
277 {
278 case CGRAPH_STATE_CONSTRUCTION:
279 /* At construction time we just need to finalize the function and move
280 it into the reachable functions list. */
281
282 node->next_needed = NULL;
283 cgraph_finalize_function (fndecl, false);
284 cgraph_mark_reachable_node (node);
285 output = true;
286 break;
287
288 case CGRAPH_STATE_IPA:
289 case CGRAPH_STATE_IPA_SSA:
290 /* When IPA optimization has already started, do all the essential
291 transformations that have already been performed on the whole
292 cgraph but not on this function. */
293
294 tree_register_cfg_hooks ();
295 if (!node->analyzed)
296 cgraph_analyze_function (node);
297 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
298 current_function_decl = fndecl;
299 node->local.inlinable = tree_inlinable_function_p (fndecl);
300 node->local.self_insns = estimate_num_insns (fndecl);
301 node->local.disregard_inline_limits
302 = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
303 /* Inlining characteristics are maintained by the
304 cgraph_mark_inline. */
305 node->global.insns = node->local.self_insns;
306 if (flag_really_no_inline && !node->local.disregard_inline_limits)
307 node->local.inlinable = 0;
308 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
309 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
310 /* When not optimizing, be sure we run early local passes anyway
311 to expand OMP. */
312 || !optimize)
313 execute_pass_list (pass_early_local_passes.sub);
314 free_dominance_info (CDI_POST_DOMINATORS);
315 free_dominance_info (CDI_DOMINATORS);
316 pop_cfun ();
317 current_function_decl = NULL;
318 break;
319
320 case CGRAPH_STATE_EXPANSION:
321 /* Functions created during expansion shall be compiled
322 directly. */
323 node->output = 0;
324 cgraph_expand_function (node);
325 break;
326
327 default:
328 gcc_unreachable ();
329 break;
330 }
331 }
332 return output;
333 }
334
335 /* When not doing unit-at-a-time, output all functions enqueued.
336 Return true when such functions were found. */
337
338 static bool
339 cgraph_assemble_pending_functions (void)
340 {
341 bool output = false;
342
343 if (flag_unit_at_a_time)
344 return false;
345
346 cgraph_output_pending_asms ();
347
348 while (cgraph_nodes_queue)
349 {
350 struct cgraph_node *n = cgraph_nodes_queue;
351
352 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
353 n->next_needed = NULL;
354 if (!n->global.inlined_to
355 && !n->alias
356 && !DECL_EXTERNAL (n->decl))
357 {
358 cgraph_expand_function (n);
359 output = true;
360 }
361 output |= cgraph_process_new_functions ();
362 }
363
364 return output;
365 }
366
367
368 /* As a GCC extension we allow redefinition of the function. The
369 semantics when the two bodies differ are not well defined.
370 We replace the old body with the new body, so in unit-at-a-time mode
371 we always use the new body, while in normal mode we may end up with the
372 old body inlined into some functions and the new body expanded and
373 inlined in others.
374
375 ??? It may make more sense to use one body for inlining and the other
376 body for expanding the function, but this is difficult to do. */
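
/* A hypothetical source fragment exercising this path under the GNU
   extern inline extension (illustrative only; the name f is made up):

	extern inline int f (void) { return 1; }
	int f (void) { return 2; }

   The second definition replaces the body recorded for the first, and the
   node is reset below so it can be analyzed again.  */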
377
378 static void
379 cgraph_reset_node (struct cgraph_node *node)
380 {
381 /* If node->output is set, then this is a unit-at-a-time compilation
382 and we have already begun whole-unit analysis. This is *not*
383 testing for whether we've already emitted the function. That
384 case can be sort-of legitimately seen with real function
385 redefinition errors. I would argue that the front end should
386 never present us with such a case, but don't enforce that for now. */
387 gcc_assert (!node->output);
388
389 /* Reset our data structures so we can analyze the function again. */
390 memset (&node->local, 0, sizeof (node->local));
391 memset (&node->global, 0, sizeof (node->global));
392 memset (&node->rtl, 0, sizeof (node->rtl));
393 node->analyzed = false;
394 node->local.redefined_extern_inline = true;
395 node->local.finalized = false;
396
397 if (!flag_unit_at_a_time)
398 {
399 struct cgraph_node *n, *next;
400
401 for (n = cgraph_nodes; n; n = next)
402 {
403 next = n->next;
404 if (n->global.inlined_to == node)
405 cgraph_remove_node (n);
406 }
407 }
408
409 cgraph_node_remove_callees (node);
410
411 /* We may need to re-queue the node for assembling in case
412 we already processed it and ignored it as not needed. */
413 if (node->reachable && !flag_unit_at_a_time)
414 {
415 struct cgraph_node *n;
416
417 for (n = cgraph_nodes_queue; n; n = n->next_needed)
418 if (n == node)
419 break;
420 if (!n)
421 node->reachable = 0;
422 }
423 }
424
425 static void
426 cgraph_lower_function (struct cgraph_node *node)
427 {
428 if (node->lowered)
429 return;
430 tree_lowering_passes (node->decl);
431 node->lowered = true;
432 }
433
434 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
435 logic in effect. If NESTED is true, then our caller cannot stand to have
436 the garbage collector run at the moment. We would need to either create
437 a new GC context, or just not compile right now. */
438
439 void
440 cgraph_finalize_function (tree decl, bool nested)
441 {
442 struct cgraph_node *node = cgraph_node (decl);
443
444 if (node->local.finalized)
445 cgraph_reset_node (node);
446
447 notice_global_symbol (decl);
448 node->decl = decl;
449 node->local.finalized = true;
450 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
451 if (node->nested)
452 lower_nested_functions (decl);
453 gcc_assert (!node->nested);
454
455 /* If not unit at a time, then we need to create the call graph
456 now, so that called functions can be queued and emitted now. */
457 if (!flag_unit_at_a_time)
458 {
459 cgraph_analyze_function (node);
460 cgraph_decide_inlining_incrementally (node, false);
461 }
462
463 if (decide_is_function_needed (node, decl))
464 cgraph_mark_needed_node (node);
465
466 /* Since we reclaim unreachable nodes at the end of every language
467 level unit, we need to be conservative about possible entry points
468 there. */
469 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
470 cgraph_mark_reachable_node (node);
471
472 /* If not unit at a time, go ahead and emit everything we've found
473 to be reachable at this time. */
474 if (!nested)
475 {
476 if (!cgraph_assemble_pending_functions ())
477 ggc_collect ();
478 }
479
480 /* If we've not yet emitted decl, tell the debug info about it. */
481 if (!TREE_ASM_WRITTEN (decl))
482 (*debug_hooks->deferred_inline_function) (decl);
483
484 /* Possibly warn about unused parameters. */
485 if (warn_unused_parameter)
486 do_warn_unused_parameter (decl);
487 }
488
489 /* Verify consistency of the given cgraph node NODE. */
490 void
491 verify_cgraph_node (struct cgraph_node *node)
492 {
493 struct cgraph_edge *e;
494 struct cgraph_node *main_clone;
495 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
496 basic_block this_block;
497 block_stmt_iterator bsi;
498 bool error_found = false;
499
500 if (errorcount || sorrycount)
501 return;
502
503 timevar_push (TV_CGRAPH_VERIFY);
504 for (e = node->callees; e; e = e->next_callee)
505 if (e->aux)
506 {
507 error ("aux field set for edge %s->%s",
508 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
509 error_found = true;
510 }
511 if (node->count < 0)
512 {
513 error ("Execution count is negative");
514 error_found = true;
515 }
516 for (e = node->callers; e; e = e->next_caller)
517 {
518 if (e->count < 0)
519 {
520 error ("caller edge count is negative");
521 error_found = true;
522 }
523 if (!e->inline_failed)
524 {
525 if (node->global.inlined_to
526 != (e->caller->global.inlined_to
527 ? e->caller->global.inlined_to : e->caller))
528 {
529 error ("inlined_to pointer is wrong");
530 error_found = true;
531 }
532 if (node->callers->next_caller)
533 {
534 error ("multiple inline callers");
535 error_found = true;
536 }
537 }
538 else
539 if (node->global.inlined_to)
540 {
541 error ("inlined_to pointer set for noninline callers");
542 error_found = true;
543 }
544 }
545 if (!node->callers && node->global.inlined_to)
546 {
547 error ("inlined_to pointer is set but no predecessors found");
548 error_found = true;
549 }
550 if (node->global.inlined_to == node)
551 {
552 error ("inlined_to pointer refers to itself");
553 error_found = true;
554 }
555
556 for (main_clone = cgraph_node (node->decl); main_clone;
557 main_clone = main_clone->next_clone)
558 if (main_clone == node)
559 break;
560 if (!cgraph_node (node->decl))
561 {
562 error ("node not found in cgraph_hash");
563 error_found = true;
564 }
565
566 if (node->analyzed
567 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
568 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
569 {
570 if (this_cfun->cfg)
571 {
572 /* The nodes we're interested in are never shared, so walk
573 the tree ignoring duplicates. */
574 struct pointer_set_t *visited_nodes = pointer_set_create ();
575 /* Reach the trees by walking over the CFG, and note the
576 enclosing basic-blocks in the call edges. */
577 FOR_EACH_BB_FN (this_block, this_cfun)
578 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
579 {
580 tree stmt = bsi_stmt (bsi);
581 tree call = get_call_expr_in (stmt);
582 tree decl;
583 if (call && (decl = get_callee_fndecl (call)))
584 {
585 struct cgraph_edge *e = cgraph_edge (node, stmt);
586 if (e)
587 {
588 if (e->aux)
589 {
590 error ("shared call_stmt:");
591 debug_generic_stmt (stmt);
592 error_found = true;
593 }
594 if (e->callee->decl != cgraph_node (decl)->decl
595 && e->inline_failed)
596 {
597 error ("edge points to wrong declaration:");
598 debug_tree (e->callee->decl);
599 fprintf (stderr," Instead of:");
600 debug_tree (decl);
601 }
602 e->aux = (void *)1;
603 }
604 else
605 {
606 error ("missing callgraph edge for call stmt:");
607 debug_generic_stmt (stmt);
608 error_found = true;
609 }
610 }
611 }
612 pointer_set_destroy (visited_nodes);
613 }
614 else
615 /* No CFG available?! */
616 gcc_unreachable ();
617
618 for (e = node->callees; e; e = e->next_callee)
619 {
620 if (!e->aux)
621 {
622 error ("edge %s->%s has no corresponding call_stmt",
623 cgraph_node_name (e->caller),
624 cgraph_node_name (e->callee));
625 debug_generic_stmt (e->call_stmt);
626 error_found = true;
627 }
628 e->aux = 0;
629 }
630 }
631 if (error_found)
632 {
633 dump_cgraph_node (stderr, node);
634 internal_error ("verify_cgraph_node failed");
635 }
636 timevar_pop (TV_CGRAPH_VERIFY);
637 }
638
639 /* Verify whole cgraph structure. */
640 void
641 verify_cgraph (void)
642 {
643 struct cgraph_node *node;
644
645 if (sorrycount || errorcount)
646 return;
647
648 for (node = cgraph_nodes; node; node = node->next)
649 verify_cgraph_node (node);
650 }
651
652 /* Output all asm statements we have stored up to be output. */
653
654 static void
655 cgraph_output_pending_asms (void)
656 {
657 struct cgraph_asm_node *can;
658
659 if (errorcount || sorrycount)
660 return;
661
662 for (can = cgraph_asm_nodes; can; can = can->next)
663 assemble_asm (can->asm_str);
664 cgraph_asm_nodes = NULL;
665 }
666
667 /* Analyze the function scheduled to be output. */
668 void
669 cgraph_analyze_function (struct cgraph_node *node)
670 {
671 tree decl = node->decl;
672
673 current_function_decl = decl;
674 push_cfun (DECL_STRUCT_FUNCTION (decl));
675 cgraph_lower_function (node);
676
677 node->local.estimated_self_stack_size = estimated_stack_frame_size ();
678 node->global.estimated_stack_size = node->local.estimated_self_stack_size;
679 node->global.stack_frame_offset = 0;
680 node->local.inlinable = tree_inlinable_function_p (decl);
681 if (!flag_unit_at_a_time)
682 node->local.self_insns = estimate_num_insns (decl);
683 if (node->local.inlinable)
684 node->local.disregard_inline_limits
685 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
686 if (flag_really_no_inline && !node->local.disregard_inline_limits)
687 node->local.inlinable = 0;
688 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
689 node->global.insns = node->local.self_insns;
690 if (!flag_unit_at_a_time)
691 {
692 bitmap_obstack_initialize (NULL);
693 tree_register_cfg_hooks ();
694 execute_pass_list (pass_early_local_passes.sub);
695 free_dominance_info (CDI_POST_DOMINATORS);
696 free_dominance_info (CDI_DOMINATORS);
697 bitmap_obstack_release (NULL);
698 }
699
700 node->analyzed = true;
701 pop_cfun ();
702 current_function_decl = NULL;
703 }
704
705 /* Look for externally_visible and used attributes and mark cgraph nodes
706 accordingly.
707
708 We cannot mark the nodes at the point the attributes are processed (in
709 handle_*_attribute) because the copy of the declarations available at that
710 point may not be canonical. For example, in:
711
712 void f();
713 void f() __attribute__((used));
714
715 the declaration we see in handle_used_attribute will be the second
716 declaration -- but the front end will subsequently merge that declaration
717 with the original declaration and discard the second declaration.
718
719 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
720
721 void f() {}
722 void f() __attribute__((externally_visible));
723
724 is valid.
725
726 So, we walk the nodes at the end of the translation unit, applying the
727 attributes at that point. */
728
729 static void
730 process_function_and_variable_attributes (struct cgraph_node *first,
731 struct varpool_node *first_var)
732 {
733 struct cgraph_node *node;
734 struct varpool_node *vnode;
735
736 for (node = cgraph_nodes; node != first; node = node->next)
737 {
738 tree decl = node->decl;
739 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
740 {
741 mark_decl_referenced (decl);
742 if (node->local.finalized)
743 cgraph_mark_needed_node (node);
744 }
745 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
746 {
747 if (! TREE_PUBLIC (node->decl))
748 warning (OPT_Wattributes,
749 "%J%<externally_visible%> attribute have effect only on public objects",
750 node->decl);
751 else
752 {
753 if (node->local.finalized)
754 cgraph_mark_needed_node (node);
755 node->local.externally_visible = true;
756 }
757 }
758 }
759 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
760 {
761 tree decl = vnode->decl;
762 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
763 {
764 mark_decl_referenced (decl);
765 if (vnode->finalized)
766 varpool_mark_needed_node (vnode);
767 }
768 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
769 {
770 if (! TREE_PUBLIC (vnode->decl))
771 warning (OPT_Wattributes,
772 "%J%<externally_visible%> attribute have effect only on public objects",
773 vnode->decl);
774 else
775 {
776 if (vnode->finalized)
777 varpool_mark_needed_node (vnode);
778 vnode->externally_visible = true;
779 }
780 }
781 }
782 }
783
784 /* Analyze the whole compilation unit once it is parsed completely. */
785
786 void
787 cgraph_finalize_compilation_unit (void)
788 {
789 struct cgraph_node *node, *next;
790 /* Keep track of already processed nodes when called multiple times for
791 intermodule optimization. */
792 static struct cgraph_node *first_analyzed;
793 struct cgraph_node *first_processed = first_analyzed;
794 static struct varpool_node *first_analyzed_var;
795
796 if (errorcount || sorrycount)
797 return;
798
799 finish_aliases_1 ();
800
801 if (!flag_unit_at_a_time)
802 {
803 cgraph_output_pending_asms ();
804 cgraph_assemble_pending_functions ();
805 varpool_output_debug_info ();
806 return;
807 }
808
809 if (!quiet_flag)
810 {
811 fprintf (stderr, "\nAnalyzing compilation unit\n");
812 fflush (stderr);
813 }
814
815 timevar_push (TV_CGRAPH);
816 process_function_and_variable_attributes (first_processed,
817 first_analyzed_var);
818 first_processed = cgraph_nodes;
819 first_analyzed_var = varpool_nodes;
820 varpool_analyze_pending_decls ();
821 if (cgraph_dump_file)
822 {
823 fprintf (cgraph_dump_file, "Initial entry points:");
824 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
825 if (node->needed && DECL_SAVED_TREE (node->decl))
826 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
827 fprintf (cgraph_dump_file, "\n");
828 }
829
830 /* Propagate reachability flag and lower representation of all reachable
831 functions. In the future, lowering will introduce new functions and
832 new entry points on the way (by template instantiation and virtual
833 method table generation for instance). */
834 while (cgraph_nodes_queue)
835 {
836 struct cgraph_edge *edge;
837 tree decl = cgraph_nodes_queue->decl;
838
839 node = cgraph_nodes_queue;
840 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
841 node->next_needed = NULL;
842
843 /* ??? It is possible to create an extern inline function and later use the
844 weak alias attribute to kill its body. See
845 gcc.c-torture/compile/20011119-1.c */
846 if (!DECL_SAVED_TREE (decl))
847 {
848 cgraph_reset_node (node);
849 continue;
850 }
851
852 gcc_assert (!node->analyzed && node->reachable);
853 gcc_assert (DECL_SAVED_TREE (decl));
854
855 cgraph_analyze_function (node);
856
857 for (edge = node->callees; edge; edge = edge->next_callee)
858 if (!edge->callee->reachable)
859 cgraph_mark_reachable_node (edge->callee);
860
861 /* We finalize local static variables while constructing callgraph
862 edges. Process their attributes too. */
863 process_function_and_variable_attributes (first_processed,
864 first_analyzed_var);
865 first_processed = cgraph_nodes;
866 first_analyzed_var = varpool_nodes;
867 varpool_analyze_pending_decls ();
868 }
869
870 /* Collect entry points to the unit. */
871 if (cgraph_dump_file)
872 {
873 fprintf (cgraph_dump_file, "Unit entry points:");
874 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
875 if (node->needed && DECL_SAVED_TREE (node->decl))
876 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
877 fprintf (cgraph_dump_file, "\n\nInitial ");
878 dump_cgraph (cgraph_dump_file);
879 }
880
881 if (cgraph_dump_file)
882 fprintf (cgraph_dump_file, "\nReclaiming functions:");
883
884 for (node = cgraph_nodes; node != first_analyzed; node = next)
885 {
886 tree decl = node->decl;
887 next = node->next;
888
889 if (node->local.finalized && !DECL_SAVED_TREE (decl))
890 cgraph_reset_node (node);
891
892 if (!node->reachable && DECL_SAVED_TREE (decl))
893 {
894 if (cgraph_dump_file)
895 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
896 cgraph_remove_node (node);
897 continue;
898 }
899 else
900 node->next_needed = NULL;
901 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
902 gcc_assert (node->analyzed == node->local.finalized);
903 }
904 if (cgraph_dump_file)
905 {
906 fprintf (cgraph_dump_file, "\n\nReclaimed ");
907 dump_cgraph (cgraph_dump_file);
908 }
909 first_analyzed = cgraph_nodes;
910 ggc_collect ();
911 timevar_pop (TV_CGRAPH);
912 }
913 /* Figure out what functions we want to assemble. */
914
915 static void
916 cgraph_mark_functions_to_output (void)
917 {
918 struct cgraph_node *node;
919
920 for (node = cgraph_nodes; node; node = node->next)
921 {
922 tree decl = node->decl;
923 struct cgraph_edge *e;
924
925 gcc_assert (!node->output);
926
927 for (e = node->callers; e; e = e->next_caller)
928 if (e->inline_failed)
929 break;
930
931 /* We need to output all local functions that are used and not
932 always inlined, as well as those that are reachable from
933 outside the current compilation unit. */
934 if (DECL_SAVED_TREE (decl)
935 && !node->global.inlined_to
936 && (node->needed
937 || (e && node->reachable))
938 && !TREE_ASM_WRITTEN (decl)
939 && !DECL_EXTERNAL (decl))
940 node->output = 1;
941 else
942 {
943 /* We should've reclaimed all functions that are not needed. */
944 #ifdef ENABLE_CHECKING
945 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
946 && !DECL_EXTERNAL (decl))
947 {
948 dump_cgraph_node (stderr, node);
949 internal_error ("failed to reclaim unneeded function");
950 }
951 #endif
952 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
953 || DECL_EXTERNAL (decl));
954
955 }
956
957 }
958 }
959
960 /* Expand function specified by NODE. */
961
962 static void
963 cgraph_expand_function (struct cgraph_node *node)
964 {
965 tree decl = node->decl;
966
967 /* We ought to not compile any inline clones. */
968 gcc_assert (!node->global.inlined_to);
969
970 if (flag_unit_at_a_time)
971 announce_function (decl);
972
973 gcc_assert (node->lowered);
974 /*cgraph_lower_function (node);*/
975
976 /* Generate RTL for the body of DECL. */
977 lang_hooks.callgraph.expand_function (decl);
978
979 /* Make sure that BE didn't give up on compiling. */
980 /* ??? Can happen with nested function of extern inline. */
981 gcc_assert (TREE_ASM_WRITTEN (node->decl));
982
983 current_function_decl = NULL;
984 if (!cgraph_preserve_function_body_p (node->decl))
985 {
986 cgraph_release_function_body (node);
987 /* Eliminate all call edges. This is important so the call_expr no longer
988 points to the dead function body. */
989 cgraph_node_remove_callees (node);
990 }
991
992 cgraph_function_flags_ready = true;
993 }
994
995 /* Return true when the call on edge E is inlined (the callee is inlined into the caller); otherwise set *REASON to why inlining failed. */
996
997 bool
998 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
999 {
1000 *reason = e->inline_failed;
1001 return !e->inline_failed;
1002 }
1003
1004
1005
1006 /* Expand all functions that must be output.
1007
1008 Attempt to topologically sort the nodes so that a function is output when
1009 all called functions are already assembled to allow data to be
1010 propagated across the callgraph. Use a stack to get smaller distance
1011 between a function and its callees (later we may choose to use a more
1012 sophisticated algorithm for function reordering; we will likely want
1013 to use subsections to make the output functions appear in top-down
1014 order). */
1015
1016 static void
1017 cgraph_expand_all_functions (void)
1018 {
1019 struct cgraph_node *node;
1020 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1021 int order_pos = 0, new_order_pos = 0;
1022 int i;
1023
1024 order_pos = cgraph_postorder (order);
1025 gcc_assert (order_pos == cgraph_n_nodes);
1026
1027 /* The garbage collector may remove inline clones we eliminate during
1028 optimization, so we must be sure not to reference them. */
1029 for (i = 0; i < order_pos; i++)
1030 if (order[i]->output)
1031 order[new_order_pos++] = order[i];
1032
1033 for (i = new_order_pos - 1; i >= 0; i--)
1034 {
1035 node = order[i];
1036 if (node->output)
1037 {
1038 gcc_assert (node->reachable);
1039 node->output = 0;
1040 cgraph_expand_function (node);
1041 }
1042 }
1043 cgraph_process_new_functions ();
1044
1045 free (order);
1046
1047 }
1048
1049 /* This is used to sort the node types by the cgraph order number. */
1050
1051 struct cgraph_order_sort
1052 {
1053 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1054 union
1055 {
1056 struct cgraph_node *f;
1057 struct varpool_node *v;
1058 struct cgraph_asm_node *a;
1059 } u;
1060 };
1061
1062 /* Output all functions, variables, and asm statements in the order
1063 according to their order fields, which is the order in which they
1064 appeared in the file. This implements -fno-toplevel-reorder. In
1065 this mode we may output functions and variables which don't really
1066 need to be output. */
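
/* Illustrative only: this code path is reached for a compilation such as
	gcc -fno-toplevel-reorder file.c
   where file.c stands for any hypothetical translation unit.  */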
1067
1068 static void
1069 cgraph_output_in_order (void)
1070 {
1071 int max;
1072 size_t size;
1073 struct cgraph_order_sort *nodes;
1074 int i;
1075 struct cgraph_node *pf;
1076 struct varpool_node *pv;
1077 struct cgraph_asm_node *pa;
1078
1079 max = cgraph_order;
1080 size = max * sizeof (struct cgraph_order_sort);
1081 nodes = (struct cgraph_order_sort *) alloca (size);
1082 memset (nodes, 0, size);
1083
1084 varpool_analyze_pending_decls ();
1085
1086 for (pf = cgraph_nodes; pf; pf = pf->next)
1087 {
1088 if (pf->output)
1089 {
1090 i = pf->order;
1091 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1092 nodes[i].kind = ORDER_FUNCTION;
1093 nodes[i].u.f = pf;
1094 }
1095 }
1096
1097 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1098 {
1099 i = pv->order;
1100 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1101 nodes[i].kind = ORDER_VAR;
1102 nodes[i].u.v = pv;
1103 }
1104
1105 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1106 {
1107 i = pa->order;
1108 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1109 nodes[i].kind = ORDER_ASM;
1110 nodes[i].u.a = pa;
1111 }
1112
1113 for (i = 0; i < max; ++i)
1114 {
1115 switch (nodes[i].kind)
1116 {
1117 case ORDER_FUNCTION:
1118 nodes[i].u.f->output = 0;
1119 cgraph_expand_function (nodes[i].u.f);
1120 break;
1121
1122 case ORDER_VAR:
1123 varpool_assemble_decl (nodes[i].u.v);
1124 break;
1125
1126 case ORDER_ASM:
1127 assemble_asm (nodes[i].u.a->asm_str);
1128 break;
1129
1130 case ORDER_UNDEFINED:
1131 break;
1132
1133 default:
1134 gcc_unreachable ();
1135 }
1136 }
1137
1138 cgraph_asm_nodes = NULL;
1139 }
1140
1141 /* Mark visibility of all functions.
1142
1143 A local function is one whose calls can occur only in the current
1144 compilation unit and all its calls are explicit, so we can change
1145 its calling convention. We simply mark all static functions whose
1146 address is not taken as local.
1147
1148 We also change the TREE_PUBLIC flag of all declarations that are public
1149 from the language point of view but whose default we want to override
1150 via visibilities from the back end's point of view. */
1151
1152 static void
1153 cgraph_function_and_variable_visibility (void)
1154 {
1155 struct cgraph_node *node;
1156 struct varpool_node *vnode;
1157
1158 for (node = cgraph_nodes; node; node = node->next)
1159 {
1160 if (node->reachable
1161 && (DECL_COMDAT (node->decl)
1162 || (!flag_whole_program
1163 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1164 node->local.externally_visible = true;
1165 if (!node->local.externally_visible && node->analyzed
1166 && !DECL_EXTERNAL (node->decl))
1167 {
1168 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1169 TREE_PUBLIC (node->decl) = 0;
1170 }
1171 node->local.local = (!node->needed
1172 && node->analyzed
1173 && !DECL_EXTERNAL (node->decl)
1174 && !node->local.externally_visible);
1175 }
1176 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1177 {
1178 if (vnode->needed
1179 && !flag_whole_program
1180 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1181 vnode->externally_visible = 1;
1182 if (!vnode->externally_visible)
1183 {
1184 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1185 TREE_PUBLIC (vnode->decl) = 0;
1186 }
1187 gcc_assert (TREE_STATIC (vnode->decl));
1188 }
1189
1190 /* Because we have to be conservative on the boundaries of source
1191 level units, it is possible that we marked some functions as
1192 reachable just because they might be used later via external
1193 linkage, but after making them local they are really unreachable
1194 now. */
1195 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1196
1197 if (cgraph_dump_file)
1198 {
1199 fprintf (cgraph_dump_file, "\nMarking local functions:");
1200 for (node = cgraph_nodes; node; node = node->next)
1201 if (node->local.local)
1202 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1203 fprintf (cgraph_dump_file, "\n\n");
1204 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1205 for (node = cgraph_nodes; node; node = node->next)
1206 if (node->local.externally_visible)
1207 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1208 fprintf (cgraph_dump_file, "\n\n");
1209 }
1210 cgraph_function_flags_ready = true;
1211 }
1212
1213 /* Return true when function body of DECL still needs to be kept around
1214 for later re-use. */
1215 bool
1216 cgraph_preserve_function_body_p (tree decl)
1217 {
1218 struct cgraph_node *node;
1219 if (!cgraph_global_info_ready)
1220 return (flag_really_no_inline
1221 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1222 : DECL_INLINE (decl));
1223 /* Look if there is any clone around. */
1224 for (node = cgraph_node (decl); node; node = node->next_clone)
1225 if (node->global.inlined_to)
1226 return true;
1227 return false;
1228 }
1229
1230 static void
1231 ipa_passes (void)
1232 {
1233 cfun = NULL;
1234 current_function_decl = NULL;
1235 tree_register_cfg_hooks ();
1236 bitmap_obstack_initialize (NULL);
1237 execute_ipa_pass_list (all_ipa_passes);
1238 bitmap_obstack_release (NULL);
1239 }
1240
1241 /* Perform simple optimizations based on callgraph. */
1242
1243 void
1244 cgraph_optimize (void)
1245 {
1246 if (errorcount || sorrycount)
1247 return;
1248
1249 #ifdef ENABLE_CHECKING
1250 verify_cgraph ();
1251 #endif
1252 if (!flag_unit_at_a_time)
1253 {
1254 cgraph_assemble_pending_functions ();
1255 cgraph_process_new_functions ();
1256 cgraph_state = CGRAPH_STATE_FINISHED;
1257 cgraph_output_pending_asms ();
1258 varpool_assemble_pending_decls ();
1259 varpool_output_debug_info ();
1260 return;
1261 }
1262
1263 /* The front end may output common variables after the unit has been finalized.
1264 It is safe to deal with them here as they are always zero initialized. */
1265 varpool_analyze_pending_decls ();
1266 cgraph_process_new_functions ();
1267
1268 timevar_push (TV_CGRAPHOPT);
1269 if (pre_ipa_mem_report)
1270 {
1271 fprintf (stderr, "Memory consumption before IPA\n");
1272 dump_memory_report (false);
1273 }
1274 if (!quiet_flag)
1275 fprintf (stderr, "Performing interprocedural optimizations\n");
1276
1277 cgraph_function_and_variable_visibility ();
1278 if (cgraph_dump_file)
1279 {
1280 fprintf (cgraph_dump_file, "Marked ");
1281 dump_cgraph (cgraph_dump_file);
1282 }
1283 cgraph_state = CGRAPH_STATE_IPA;
1284
1285 /* Don't run the IPA passes if there was any error or sorry messages. */
1286 if (errorcount == 0 && sorrycount == 0)
1287 ipa_passes ();
1288
1289 /* This pass removes bodies of extern inline functions we never inlined.
1290 Do this later so other IPA passes see what is really going on. */
1291 cgraph_remove_unreachable_nodes (false, dump_file);
1292 cgraph_increase_alignment ();
1293 cgraph_global_info_ready = true;
1294 if (cgraph_dump_file)
1295 {
1296 fprintf (cgraph_dump_file, "Optimized ");
1297 dump_cgraph (cgraph_dump_file);
1298 dump_varpool (cgraph_dump_file);
1299 }
1300 if (post_ipa_mem_report)
1301 {
1302 fprintf (stderr, "Memory consumption after IPA\n");
1303 dump_memory_report (false);
1304 }
1305 timevar_pop (TV_CGRAPHOPT);
1306
1307 /* Output everything. */
1308 if (!quiet_flag)
1309 fprintf (stderr, "Assembling functions:\n");
1310 #ifdef ENABLE_CHECKING
1311 verify_cgraph ();
1312 #endif
1313
1314 cgraph_mark_functions_to_output ();
1315
1316 cgraph_state = CGRAPH_STATE_EXPANSION;
1317 if (!flag_toplevel_reorder)
1318 cgraph_output_in_order ();
1319 else
1320 {
1321 cgraph_output_pending_asms ();
1322
1323 cgraph_expand_all_functions ();
1324 varpool_remove_unreferenced_decls ();
1325
1326 varpool_assemble_pending_decls ();
1327 varpool_output_debug_info ();
1328 }
1329 cgraph_process_new_functions ();
1330 cgraph_state = CGRAPH_STATE_FINISHED;
1331
1332 if (cgraph_dump_file)
1333 {
1334 fprintf (cgraph_dump_file, "\nFinal ");
1335 dump_cgraph (cgraph_dump_file);
1336 }
1337 #ifdef ENABLE_CHECKING
1338 verify_cgraph ();
1339 /* Double check that all inline clones are gone and that all
1340 function bodies have been released from memory. */
1341 if (flag_unit_at_a_time
1342 && !(sorrycount || errorcount))
1343 {
1344 struct cgraph_node *node;
1345 bool error_found = false;
1346
1347 for (node = cgraph_nodes; node; node = node->next)
1348 if (node->analyzed
1349 && (node->global.inlined_to
1350 || DECL_SAVED_TREE (node->decl)))
1351 {
1352 error_found = true;
1353 dump_cgraph_node (stderr, node);
1354 }
1355 if (error_found)
1356 internal_error ("nodes with no released memory found");
1357 }
1358 #endif
1359 }
1360
1361 /* Increase alignment of global arrays to improve vectorization potential.
1362 TODO:
1363 - Consider also structs that have an array field.
1364 - Use ipa analysis to prune arrays that can't be vectorized?
1365 This should involve global alignment analysis and in the future also
1366 array padding. */
1367
1368 static void
1369 cgraph_increase_alignment (void)
1370 {
1371 if (flag_section_anchors && flag_tree_vectorize)
1372 {
1373 struct varpool_node *vnode;
1374
1375 /* Increase the alignment of all global arrays for vectorization. */
1376 for (vnode = varpool_nodes_queue;
1377 vnode;
1378 vnode = vnode->next_needed)
1379 {
1380 tree vectype, decl = vnode->decl;
1381 unsigned int alignment;
1382
1383 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
1384 continue;
1385 vectype = get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl)));
1386 if (!vectype)
1387 continue;
1388 alignment = TYPE_ALIGN (vectype);
1389 if (DECL_ALIGN (decl) >= alignment)
1390 continue;
1391
1392 if (vect_can_force_dr_alignment_p (decl, alignment))
1393 {
1394 DECL_ALIGN (decl) = TYPE_ALIGN (vectype);
1395 DECL_USER_ALIGN (decl) = 1;
1396 if (cgraph_dump_file)
1397 {
1398 fprintf (cgraph_dump_file, "Increasing alignment of decl: ");
1399 print_generic_expr (cgraph_dump_file, decl, TDF_SLIM);
1400 }
1401 }
1402 }
1403 }
1404 }
1405
1406 /* Generate and emit a static constructor or destructor. WHICH must be
1407 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1408 GENERIC statements. */
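
/* Usage sketch (assumptions: BODY is a hypothetical STATEMENT_LIST already
   built by the caller, and DEFAULT_INIT_PRIORITY is taken to be the usual
   default priority value):

	cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   emits a static constructor; passing 'D' instead emits a destructor.  */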
1409
1410 void
1411 cgraph_build_static_cdtor (char which, tree body, int priority)
1412 {
1413 static int counter = 0;
1414 char which_buf[16];
1415 tree decl, name, resdecl;
1416
1417 sprintf (which_buf, "%c_%d", which, counter++);
1418 name = get_file_function_name (which_buf);
1419
1420 decl = build_decl (FUNCTION_DECL, name,
1421 build_function_type (void_type_node, void_list_node));
1422 current_function_decl = decl;
1423
1424 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1425 DECL_ARTIFICIAL (resdecl) = 1;
1426 DECL_IGNORED_P (resdecl) = 1;
1427 DECL_RESULT (decl) = resdecl;
1428
1429 allocate_struct_function (decl);
1430
1431 TREE_STATIC (decl) = 1;
1432 TREE_USED (decl) = 1;
1433 DECL_ARTIFICIAL (decl) = 1;
1434 DECL_IGNORED_P (decl) = 1;
1435 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1436 DECL_SAVED_TREE (decl) = body;
1437 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1438 DECL_UNINLINABLE (decl) = 1;
1439
1440 DECL_INITIAL (decl) = make_node (BLOCK);
1441 TREE_USED (DECL_INITIAL (decl)) = 1;
1442
1443 DECL_SOURCE_LOCATION (decl) = input_location;
1444 cfun->function_end_locus = input_location;
1445
1446 switch (which)
1447 {
1448 case 'I':
1449 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1450 break;
1451 case 'D':
1452 DECL_STATIC_DESTRUCTOR (decl) = 1;
1453 break;
1454 default:
1455 gcc_unreachable ();
1456 }
1457
1458 gimplify_function_tree (decl);
1459
1460 cgraph_add_new_function (decl, false);
1461 cgraph_mark_needed_node (cgraph_node (decl));
1462
1463 if (targetm.have_ctors_dtors)
1464 {
1465 void (*fn) (rtx, int);
1466
1467 if (which == 'I')
1468 fn = targetm.asm_out.constructor;
1469 else
1470 fn = targetm.asm_out.destructor;
1471 fn (XEXP (DECL_RTL (decl), 0), priority);
1472 }
1473 }
1474
1475 void
1476 init_cgraph (void)
1477 {
1478 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1479 }
1480
1481 /* The edges representing the callers of the NEW_VERSION node were
1482 fixed by cgraph_function_versioning (); now the call_expr in their
1483 respective call statements should be updated to call NEW_VERSION. */
1484
1485 static void
1486 update_call_expr (struct cgraph_node *new_version)
1487 {
1488 struct cgraph_edge *e;
1489
1490 gcc_assert (new_version);
1491 for (e = new_version->callers; e; e = e->next_caller)
1492 /* Update the call expr on the edges
1493 to call the new version. */
1494 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1495 }
1496
1497
1498 /* Create a new cgraph node which is the new version of
1499 the OLD_VERSION node. REDIRECT_CALLERS holds the caller
1500 edges which should be redirected to point to
1501 NEW_VERSION. All the callee edges of OLD_VERSION
1502 are cloned to the new version node. Return the new
1503 version node. */
1504
1505 static struct cgraph_node *
1506 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1507 tree new_decl,
1508 VEC(cgraph_edge_p,heap) *redirect_callers)
1509 {
1510 struct cgraph_node *new_version;
1511 struct cgraph_edge *e, *new_e;
1512 struct cgraph_edge *next_callee;
1513 unsigned i;
1514
1515 gcc_assert (old_version);
1516
1517 new_version = cgraph_node (new_decl);
1518
1519 new_version->analyzed = true;
1520 new_version->local = old_version->local;
1521 new_version->global = old_version->global;
1522 new_version->rtl = old_version->rtl;
1523 new_version->reachable = true;
1524 new_version->count = old_version->count;
1525
1526 /* Clone the old node callees. Recursive calls are
1527 also cloned. */
1528 for (e = old_version->callees;e; e=e->next_callee)
1529 {
1530 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1531 new_e->count = e->count;
1532 }
1533 /* Fix recursive calls.
1534 If OLD_VERSION has a recursive call after the
1535 previous edge cloning, the new version will have an edge
1536 pointing to the old version, which is wrong.
1537 Redirect it to point to the new version. */
1538 for (e = new_version->callees ; e; e = next_callee)
1539 {
1540 next_callee = e->next_callee;
1541 if (e->callee == old_version)
1542 cgraph_redirect_edge_callee (e, new_version);
1543
1544 if (!next_callee)
1545 break;
1546 }
1547 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1548 {
1549 /* Redirect calls to the old version node to point to its new
1550 version. */
1551 cgraph_redirect_edge_callee (e, new_version);
1552 }
1553
1554 return new_version;
1555 }
1556
1557 /* Perform function versioning.
1558 Function versioning includes copying of the tree and
1559 a callgraph update (creating a new cgraph node and updating
1560 its callees and callers).
1561
1562 REDIRECT_CALLERS varray includes the edges to be redirected
1563 to the new version.
1564
1565 TREE_MAP is a mapping of tree nodes we want to replace with
1566 new ones (according to results of prior analysis).
1567 OLD_VERSION_NODE is the node that is versioned.
1568 It returns the new version's cgraph node. */
1569
1570 struct cgraph_node *
1571 cgraph_function_versioning (struct cgraph_node *old_version_node,
1572 VEC(cgraph_edge_p,heap) *redirect_callers,
1573 varray_type tree_map)
1574 {
1575 tree old_decl = old_version_node->decl;
1576 struct cgraph_node *new_version_node = NULL;
1577 tree new_decl;
1578
1579 if (!tree_versionable_function_p (old_decl))
1580 return NULL;
1581
1582 /* Make a new FUNCTION_DECL tree node for the
1583 new version. */
1584 new_decl = copy_node (old_decl);
1585
1586 /* Create the new version's call-graph node.
1587 and update the edges of the new node. */
1588 new_version_node =
1589 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1590 redirect_callers);
1591
1592 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1593 tree_function_versioning (old_decl, new_decl, tree_map, false);
1594 /* Update the call_expr on the edges to call the new version node. */
1595 update_call_expr (new_version_node);
1596
1597 /* Update the new version's properties.
1598 Make the new version visible only within this translation unit.
1599 ??? We cannot use COMDAT linkage because there is no
1600 ABI support for this. */
1601 DECL_EXTERNAL (new_version_node->decl) = 0;
1602 DECL_ONE_ONLY (new_version_node->decl) = 0;
1603 TREE_PUBLIC (new_version_node->decl) = 0;
1604 DECL_COMDAT (new_version_node->decl) = 0;
1605 new_version_node->local.externally_visible = 0;
1606 new_version_node->local.local = 1;
1607 new_version_node->lowered = true;
1608 return new_version_node;
1609 }
1610
1611 /* Produce a separate function body for inline clones so the offline copy can be
1612 modified without affecting them. */
1613 struct cgraph_node *
1614 save_inline_function_body (struct cgraph_node *node)
1615 {
1616 struct cgraph_node *first_clone;
1617
1618 gcc_assert (node == cgraph_node (node->decl));
1619
1620 cgraph_lower_function (node);
1621
1622 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1623 output to the assembly file. This clone is pointed to by the inline_decl of the
1624 original function, and the inlining infrastructure knows how to deal with it. */
1625 if (!flag_unit_at_a_time)
1626 {
1627 struct cgraph_edge *e;
1628
1629 first_clone = cgraph_clone_node (node, node->count, 0, false);
1630 first_clone->needed = 0;
1631 first_clone->reachable = 1;
1632 /* Recursively clone all bodies. */
1633 for (e = first_clone->callees; e; e = e->next_callee)
1634 if (!e->inline_failed)
1635 cgraph_clone_inlined_nodes (e, true, false);
1636 }
1637 else
1638 first_clone = node->next_clone;
1639
1640 first_clone->decl = copy_node (node->decl);
1641 node->next_clone = NULL;
1642 if (!flag_unit_at_a_time)
1643 node->inline_decl = first_clone->decl;
1644 first_clone->prev_clone = NULL;
1645 cgraph_insert_node_to_hashtable (first_clone);
1646 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1647
1648 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1649 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1650
1651 DECL_EXTERNAL (first_clone->decl) = 0;
1652 DECL_ONE_ONLY (first_clone->decl) = 0;
1653 TREE_PUBLIC (first_clone->decl) = 0;
1654 DECL_COMDAT (first_clone->decl) = 0;
1655
1656 for (node = first_clone->next_clone; node; node = node->next_clone)
1657 node->decl = first_clone->decl;
1658 #ifdef ENABLE_CHECKING
1659 verify_cgraph_node (first_clone);
1660 #endif
1661 return first_clone;
1662 }