ACKed by Jan Hubicka in http://gcc.gnu.org/bugzilla/show_bug.cgi?id=25776#c9
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23 as a few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of the
33 function and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception, needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_variable
38
39 This function has the same behavior as the above but is used for static
40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once the compilation unit is finalized and it will
45 no longer change.
46
47 In unit-at-a-time mode the call-graph construction and local function
48 analysis take place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be the compilation
52 unit as defined by the language - for instance the C front end allows multiple
53 compilation units to be parsed at once and it should call this function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In unit-at-a-time compilation the interprocedural analysis takes
59 place here. In particular, static functions whose address is never
60 taken are marked as local. The back end can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at a given stage
68 of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When a function or variable is referenced in some hidden way (for instance
74 via assembly code and marked by attribute "used"), the call-graph data structure
75 must be updated accordingly by this function.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
81 callgraph and varpool nodes referenced by the expression as needed.
82
83 ??? On tree-ssa, genericizing should take place here and we will avoid the
84 need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
88 This function is used to expand a function and pass it to the RTL back end.
89 The front end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92 cgraph_varpool_finalize_function and cgraph_optimize can cause arbitrary
93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode analysis of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too
104 and variables found to be needed are output to the assembly file. Via
105 the mark_referenced call in assemble_variable, functions referenced by
106 static variables are noticed too.
107
108 The inter-procedural information is produced and its existence is
109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change a function from !reachable to reachable and thus
111 assemble_variable no longer calls mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that have not been completely inlined or are not external are output.
115
116 ??? It is possible that a reference to a function or variable is optimized
117 out. We cannot deal with this nicely because topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize code so variables are output very last and only if they
122 really have been referenced by produced code, so we catch more cases
123 where the reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used but
129 it is still needed for some legacy code that relies on a particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using a call to
135 cgraph_assemble_pending_functions from cgraph_finalize_function. The
136 decision on whether a function is needed is made more conservative so
137 uninlinable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and we
139 rely completely upon assemble_variable to mark them. */
140
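/* As an illustration only (this block is not part of the driver): a front end
   in unit-at-a-time mode is expected to drive the interface described above
   roughly as follows, where parse_next_function and parse_next_variable are
   hypothetical front-end helpers returning FUNCTION_DECLs and VAR_DECLs, and
   cgraph_varpool_finalize_decl is the varpool finalization entry point used
   elsewhere in this file:

     tree decl;
     while ((decl = parse_next_function ()) != NULL_TREE)
       cgraph_finalize_function (decl, false);
     while ((decl = parse_next_variable ()) != NULL_TREE)
       cgraph_varpool_finalize_decl (decl);
     cgraph_finalize_compilation_unit ();
     cgraph_optimize ();  */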
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_output_pending_asms (void);
175
176 /* Records tree nodes seen in record_reference. Simply using
177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
179 record_reference itself. */
180 static struct pointer_set_t *visited_nodes;
181
182 static FILE *cgraph_dump_file;
183
184 /* Determine if function DECL is needed. That is, visible to something
185 either outside this translation unit, something magic in the system
186 configury, or (if not doing unit-at-a-time) to something we haven't
187 seen yet. */
188
189 static bool
190 decide_is_function_needed (struct cgraph_node *node, tree decl)
191 {
192 tree origin;
193 if (MAIN_NAME_P (DECL_NAME (decl))
194 && TREE_PUBLIC (decl))
195 {
196 node->local.externally_visible = true;
197 return true;
198 }
199
200 /* If the user told us it is used, then it must be so. */
201 if (node->local.externally_visible
202 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
203 return true;
204
205 /* ??? If the assembler name is set by hand, it is possible to assemble
206 the name later after finalizing the function and the fact is noticed
207 in assemble_name then. This is arguably a bug. */
208 if (DECL_ASSEMBLER_NAME_SET_P (decl)
209 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
210 return true;
211
212 /* If we decided it was needed before, but at the time we didn't have
213 the body of the function available, then it's still needed. We have
214 to go back and re-check its dependencies now. */
215 if (node->needed)
216 return true;
217
218 /* Externally visible functions must be output. The exception is
219 COMDAT functions that must be output only when they are needed.
220
221 When not optimizing, also output the static functions (see
222 PR25962), but don't do so for always_inline functions.
223 */
224 if (((TREE_PUBLIC (decl)
225 || (!optimize && !node->local.disregard_inline_limits))
226 && !flag_whole_program)
227 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
228 return true;
229
230 /* Constructors and destructors are reachable from the runtime by
231 some mechanism. */
232 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
233 return true;
234
235 if (flag_unit_at_a_time)
236 return false;
237
238 /* If not doing unit at a time, then we'll only defer this function
239 if it's marked for inlining. Otherwise we want to emit it now. */
240
241 /* "extern inline" functions are never output locally. */
242 if (DECL_EXTERNAL (decl))
243 return false;
244 /* Nested functions of an extern inline function shall not be emitted unless
245 we inlined the origin. */
246 for (origin = decl_function_context (decl); origin;
247 origin = decl_function_context (origin))
248 if (DECL_EXTERNAL (origin))
249 return false;
250 /* We want to emit COMDAT functions only when absolutely necessary. */
251 if (DECL_COMDAT (decl))
252 return false;
253 if (!DECL_INLINE (decl)
254 || (!node->local.disregard_inline_limits
255 /* When declared inline, defer even the uninlinable functions.
256 This allows them to be eliminated when unused. */
257 && !DECL_DECLARED_INLINE_P (decl)
258 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
259 return true;
260
261 return false;
262 }
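/* For illustration only: one example of a "hidden" entry point the checks
   above keep alive is a static function carrying the "used" attribute
   (hypothetical user code, not part of GCC):

     static void helper (void) __attribute__ ((used));
     static void helper (void) { }

   Nothing in the unit calls helper, yet the lookup_attribute ("used", ...)
   test above reports it as needed, so its body is emitted anyway.  */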
263
264 /* Walk the decls we marked as necessary and see if they reference new
265 variables or functions and add them into the worklists. */
266 static bool
267 cgraph_varpool_analyze_pending_decls (void)
268 {
269 bool changed = false;
270 timevar_push (TV_CGRAPH);
271
272 while (cgraph_varpool_first_unanalyzed_node)
273 {
274 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
275
276 cgraph_varpool_first_unanalyzed_node->analyzed = true;
277
278 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
279
280 if (DECL_INITIAL (decl))
281 {
282 visited_nodes = pointer_set_create ();
283 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
284 pointer_set_destroy (visited_nodes);
285 visited_nodes = NULL;
286 }
287 changed = true;
288 }
289 timevar_pop (TV_CGRAPH);
290 return changed;
291 }
292
293 /* Optimization of function bodies might've rendered some variables
294 unnecessary, so we want to avoid compiling them.
295
296 This is done by pruning the queue and keeping only the variables that
297 really appear needed (i.e. they are either externally visible or referenced
298 by a compiled function). Re-doing the reachability analysis on variables
299 brings back the remaining variables referenced by these. */
300 static void
301 cgraph_varpool_remove_unreferenced_decls (void)
302 {
303 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
304
305 cgraph_varpool_reset_queue ();
306
307 if (errorcount || sorrycount)
308 return;
309
310 while (node)
311 {
312 tree decl = node->decl;
313 next = node->next_needed;
314 node->needed = 0;
315
316 if (node->finalized
317 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
318 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
319 || node->force_output
320 || decide_is_variable_needed (node, decl)
321 /* ??? Cgraph does not yet rule the world with an iron hand,
322 and does not control the emission of debug information.
323 After a variable has its DECL_RTL set, we must assume that
324 it may be referenced by the debug information, and we can
325 no longer elide it. */
326 || DECL_RTL_SET_P (decl)))
327 cgraph_varpool_mark_needed_node (node);
328
329 node = next;
330 }
331 /* Make sure we mark alias targets as used targets. */
332 finish_aliases_1 ();
333 cgraph_varpool_analyze_pending_decls ();
334 }
335
336
337 /* When not doing unit-at-a-time, output all functions enqueued.
338 Return true when such functions were found. */
339
340 bool
341 cgraph_assemble_pending_functions (void)
342 {
343 bool output = false;
344
345 if (flag_unit_at_a_time)
346 return false;
347
348 cgraph_output_pending_asms ();
349
350 while (cgraph_nodes_queue)
351 {
352 struct cgraph_node *n = cgraph_nodes_queue;
353
354 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
355 n->next_needed = NULL;
356 if (!n->global.inlined_to
357 && !n->alias
358 && !DECL_EXTERNAL (n->decl))
359 {
360 cgraph_expand_function (n);
361 output = true;
362 }
363 }
364
365 /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
366 the expansion process. Note that this queue may grow as it is
367 being processed, as the new functions may generate new ones. */
368 while (cgraph_expand_queue)
369 {
370 struct cgraph_node *n = cgraph_expand_queue;
371 cgraph_expand_queue = cgraph_expand_queue->next_needed;
372 n->next_needed = NULL;
373 cgraph_finalize_function (n->decl, false);
374 output = true;
375 }
376
377 return output;
378 }
379
380
381 /* As a GCC extension we allow redefinition of the function. The
382 semantics when the two bodies differ is not well defined.
383 We replace the old body with the new body, so in unit-at-a-time mode
384 we always use the new body, while in normal mode we may end up with
385 the old body inlined into some functions and the new body expanded and
386 inlined in others.
387
388 ??? It may make more sense to use one body for inlining and the other
389 body for expanding the function but this is difficult to do. */
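/* Purely as an illustration of the extension described above (hypothetical
   user code, not part of GCC): a translation unit may first provide an
   "extern inline" body and later override it with a real definition,

     extern inline int twice (int x) { return x + x; }
     int twice (int x) { return 2 * x; }

   and cgraph_reset_node below is what discards the analysis of the first
   body so the second one can be analyzed in its place.  */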
390
391 static void
392 cgraph_reset_node (struct cgraph_node *node)
393 {
394 /* If node->output is set, then this is a unit-at-a-time compilation
395 and we have already begun whole-unit analysis. This is *not*
396 testing for whether we've already emitted the function. That
397 case can be sort-of legitimately seen with real function
398 redefinition errors. I would argue that the front end should
399 never present us with such a case, but don't enforce that for now. */
400 gcc_assert (!node->output);
401
402 /* Reset our data structures so we can analyze the function again. */
403 memset (&node->local, 0, sizeof (node->local));
404 memset (&node->global, 0, sizeof (node->global));
405 memset (&node->rtl, 0, sizeof (node->rtl));
406 node->analyzed = false;
407 node->local.redefined_extern_inline = true;
408 node->local.finalized = false;
409
410 if (!flag_unit_at_a_time)
411 {
412 struct cgraph_node *n;
413
414 for (n = cgraph_nodes; n; n = n->next)
415 if (n->global.inlined_to == node)
416 cgraph_remove_node (n);
417 }
418
419 cgraph_node_remove_callees (node);
420
421 /* We may need to re-queue the node for assembling in case
422 we have already processed it and ignored it as not needed. */
423 if (node->reachable && !flag_unit_at_a_time)
424 {
425 struct cgraph_node *n;
426
427 for (n = cgraph_nodes_queue; n; n = n->next_needed)
428 if (n == node)
429 break;
430 if (!n)
431 node->reachable = 0;
432 }
433 }
434
435 static void
436 cgraph_lower_function (struct cgraph_node *node)
437 {
438 if (node->lowered)
439 return;
440 tree_lowering_passes (node->decl);
441 node->lowered = true;
442 }
443
444 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
445 logic in effect. If NESTED is true, then our caller cannot stand to have
446 the garbage collector run at the moment. We would need to either create
447 a new GC context, or just not compile right now. */
448
449 void
450 cgraph_finalize_function (tree decl, bool nested)
451 {
452 struct cgraph_node *node = cgraph_node (decl);
453
454 if (node->local.finalized)
455 cgraph_reset_node (node);
456
457 notice_global_symbol (decl);
458 node->decl = decl;
459 node->local.finalized = true;
460 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
461 if (node->nested)
462 lower_nested_functions (decl);
463 gcc_assert (!node->nested);
464
465 /* If not unit at a time, then we need to create the call graph
466 now, so that called functions can be queued and emitted now. */
467 if (!flag_unit_at_a_time)
468 {
469 cgraph_analyze_function (node);
470 cgraph_decide_inlining_incrementally (node, false);
471 }
472
473 if (decide_is_function_needed (node, decl))
474 cgraph_mark_needed_node (node);
475
476 /* Since we reclaim unreachable nodes at the end of every language
477 level unit, we need to be conservative about possible entry points
478 there. */
479 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
480 cgraph_mark_reachable_node (node);
481
482 /* If not unit at a time, go ahead and emit everything we've found
483 to be reachable at this time. */
484 if (!nested)
485 {
486 if (!cgraph_assemble_pending_functions ())
487 ggc_collect ();
488 }
489
490 /* If we've not yet emitted decl, tell the debug info about it. */
491 if (!TREE_ASM_WRITTEN (decl))
492 (*debug_hooks->deferred_inline_function) (decl);
493
494 /* Possibly warn about unused parameters. */
495 if (warn_unused_parameter)
496 do_warn_unused_parameter (decl);
497 }
498
499 /* Walk the tree and record references to static and external variables and to functions whose address is taken. Called via walk_tree. */
500 static tree
501 record_reference (tree *tp, int *walk_subtrees, void *data)
502 {
503 tree t = *tp;
504
505 switch (TREE_CODE (t))
506 {
507 case VAR_DECL:
508 /* ??? Really, we should mark this decl as *potentially* referenced
509 by this function and re-examine whether the decl is actually used
510 after rtl has been generated. */
511 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
512 {
513 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
514 if (lang_hooks.callgraph.analyze_expr)
515 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
516 data);
517 }
518 break;
519
520 case FDESC_EXPR:
521 case ADDR_EXPR:
522 if (flag_unit_at_a_time)
523 {
524 /* Record dereferences to the functions. This makes the
525 functions reachable unconditionally. */
526 tree decl = TREE_OPERAND (*tp, 0);
527 if (TREE_CODE (decl) == FUNCTION_DECL)
528 cgraph_mark_needed_node (cgraph_node (decl));
529 }
530 break;
531
532 default:
533 /* Save some cycles by not walking types and declarations as we
534 won't find anything useful there anyway. */
535 if (IS_TYPE_OR_DECL_P (*tp))
536 {
537 *walk_subtrees = 0;
538 break;
539 }
540
541 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
542 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
543 break;
544 }
545
546 return NULL;
547 }
548
549 /* Create cgraph edges for function calls inside BODY from NODE. */
550
551 static void
552 cgraph_create_edges (struct cgraph_node *node, tree body)
553 {
554 basic_block bb;
555
556 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
557 block_stmt_iterator bsi;
558 tree step;
559 visited_nodes = pointer_set_create ();
560
561 /* Reach the trees by walking over the CFG, and note the
562 enclosing basic-blocks in the call edges. */
563 FOR_EACH_BB_FN (bb, this_cfun)
564 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
565 {
566 tree stmt = bsi_stmt (bsi);
567 tree call = get_call_expr_in (stmt);
568 tree decl;
569
570 if (call && (decl = get_callee_fndecl (call)))
571 {
572 cgraph_create_edge (node, cgraph_node (decl), stmt,
573 bb->count,
574 bb->loop_depth);
575 walk_tree (&TREE_OPERAND (call, 1),
576 record_reference, node, visited_nodes);
577 if (TREE_CODE (stmt) == MODIFY_EXPR)
578 walk_tree (&TREE_OPERAND (stmt, 0),
579 record_reference, node, visited_nodes);
580 }
581 else
582 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
583 }
584
585 /* Look for initializers of constant variables and private statics. */
586 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
587 step;
588 step = TREE_CHAIN (step))
589 {
590 tree decl = TREE_VALUE (step);
591 if (TREE_CODE (decl) == VAR_DECL
592 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
593 && flag_unit_at_a_time)
594 cgraph_varpool_finalize_decl (decl);
595 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
596 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
597 }
598
599 pointer_set_destroy (visited_nodes);
600 visited_nodes = NULL;
601 }
602
603 /* Give initial reasons why inlining would fail. These get
604 either NULLified or, usually, overwritten by a more precise reason
605 later. */
606 static void
607 initialize_inline_failed (struct cgraph_node *node)
608 {
609 struct cgraph_edge *e;
610
611 for (e = node->callers; e; e = e->next_caller)
612 {
613 gcc_assert (!e->callee->global.inlined_to);
614 gcc_assert (e->inline_failed);
615 if (node->local.redefined_extern_inline)
616 e->inline_failed = N_("redefined extern inline functions are not "
617 "considered for inlining");
618 else if (!node->local.inlinable)
619 e->inline_failed = N_("function not inlinable");
620 else
621 e->inline_failed = N_("function not considered for inlining");
622 }
623 }
624
625 /* Rebuild call edges for the current function after passes that are not
626 aware of cgraph updating. */
627 static unsigned int
628 rebuild_cgraph_edges (void)
629 {
630 basic_block bb;
631 struct cgraph_node *node = cgraph_node (current_function_decl);
632 block_stmt_iterator bsi;
633
634 cgraph_node_remove_callees (node);
635
636 node->count = ENTRY_BLOCK_PTR->count;
637
638 FOR_EACH_BB (bb)
639 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
640 {
641 tree stmt = bsi_stmt (bsi);
642 tree call = get_call_expr_in (stmt);
643 tree decl;
644
645 if (call && (decl = get_callee_fndecl (call)))
646 cgraph_create_edge (node, cgraph_node (decl), stmt,
647 bb->count,
648 bb->loop_depth);
649 }
650 initialize_inline_failed (node);
651 gcc_assert (!node->global.inlined_to);
652 return 0;
653 }
654
655 struct tree_opt_pass pass_rebuild_cgraph_edges =
656 {
657 NULL, /* name */
658 NULL, /* gate */
659 rebuild_cgraph_edges, /* execute */
660 NULL, /* sub */
661 NULL, /* next */
662 0, /* static_pass_number */
663 0, /* tv_id */
664 PROP_cfg, /* properties_required */
665 0, /* properties_provided */
666 0, /* properties_destroyed */
667 0, /* todo_flags_start */
668 0, /* todo_flags_finish */
669 0 /* letter */
670 };
671
672 /* Verify the consistency of the given cgraph node. */
673 void
674 verify_cgraph_node (struct cgraph_node *node)
675 {
676 struct cgraph_edge *e;
677 struct cgraph_node *main_clone;
678 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
679 basic_block this_block;
680 block_stmt_iterator bsi;
681 bool error_found = false;
682
683 timevar_push (TV_CGRAPH_VERIFY);
684 for (e = node->callees; e; e = e->next_callee)
685 if (e->aux)
686 {
687 error ("aux field set for edge %s->%s",
688 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
689 error_found = true;
690 }
691 if (node->count < 0)
692 {
693 error ("Execution count is negative");
694 error_found = true;
695 }
696 for (e = node->callers; e; e = e->next_caller)
697 {
698 if (e->count < 0)
699 {
700 error ("caller edge count is negative");
701 error_found = true;
702 }
703 if (!e->inline_failed)
704 {
705 if (node->global.inlined_to
706 != (e->caller->global.inlined_to
707 ? e->caller->global.inlined_to : e->caller))
708 {
709 error ("inlined_to pointer is wrong");
710 error_found = true;
711 }
712 if (node->callers->next_caller)
713 {
714 error ("multiple inline callers");
715 error_found = true;
716 }
717 }
718 else
719 if (node->global.inlined_to)
720 {
721 error ("inlined_to pointer set for noninline callers");
722 error_found = true;
723 }
724 }
725 if (!node->callers && node->global.inlined_to)
726 {
727 error ("inlined_to pointer is set but no predecessors found");
728 error_found = true;
729 }
730 if (node->global.inlined_to == node)
731 {
732 error ("inlined_to pointer refers to itself");
733 error_found = true;
734 }
735
736 for (main_clone = cgraph_node (node->decl); main_clone;
737 main_clone = main_clone->next_clone)
738 if (main_clone == node)
739 break;
740 if (!cgraph_node (node->decl))
741 {
742 error ("node not found in cgraph_hash");
743 error_found = true;
744 }
745
746 if (node->analyzed
747 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
748 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
749 {
750 if (this_cfun->cfg)
751 {
752 /* The nodes we're interested in are never shared, so walk
753 the tree ignoring duplicates. */
754 visited_nodes = pointer_set_create ();
755 /* Reach the trees by walking over the CFG, and note the
756 enclosing basic-blocks in the call edges. */
757 FOR_EACH_BB_FN (this_block, this_cfun)
758 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
759 {
760 tree stmt = bsi_stmt (bsi);
761 tree call = get_call_expr_in (stmt);
762 tree decl;
763 if (call && (decl = get_callee_fndecl (call)))
764 {
765 struct cgraph_edge *e = cgraph_edge (node, stmt);
766 if (e)
767 {
768 if (e->aux)
769 {
770 error ("shared call_stmt:");
771 debug_generic_stmt (stmt);
772 error_found = true;
773 }
774 if (e->callee->decl != cgraph_node (decl)->decl
775 && e->inline_failed)
776 {
777 error ("edge points to wrong declaration:");
778 debug_tree (e->callee->decl);
779 fprintf (stderr," Instead of:");
780 debug_tree (decl);
781 }
782 e->aux = (void *)1;
783 }
784 else
785 {
786 error ("missing callgraph edge for call stmt:");
787 debug_generic_stmt (stmt);
788 error_found = true;
789 }
790 }
791 }
792 pointer_set_destroy (visited_nodes);
793 visited_nodes = NULL;
794 }
795 else
796 /* No CFG available?! */
797 gcc_unreachable ();
798
799 for (e = node->callees; e; e = e->next_callee)
800 {
801 if (!e->aux)
802 {
803 error ("edge %s->%s has no corresponding call_stmt",
804 cgraph_node_name (e->caller),
805 cgraph_node_name (e->callee));
806 debug_generic_stmt (e->call_stmt);
807 error_found = true;
808 }
809 e->aux = 0;
810 }
811 }
812 if (error_found)
813 {
814 dump_cgraph_node (stderr, node);
815 internal_error ("verify_cgraph_node failed");
816 }
817 timevar_pop (TV_CGRAPH_VERIFY);
818 }
819
820 /* Verify whole cgraph structure. */
821 void
822 verify_cgraph (void)
823 {
824 struct cgraph_node *node;
825
826 if (sorrycount || errorcount)
827 return;
828
829 for (node = cgraph_nodes; node; node = node->next)
830 verify_cgraph_node (node);
831 }
832
833 /* Output one variable, if necessary. Return whether we output it. */
834 static bool
835 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
836 {
837 tree decl = node->decl;
838
839 if (!TREE_ASM_WRITTEN (decl)
840 && !node->alias
841 && !DECL_EXTERNAL (decl)
842 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
843 {
844 assemble_variable (decl, 0, 1, 0);
845 /* Local static variables are never seen by check_global_declarations
846 so we need to output debug info by hand. */
847 if (DECL_CONTEXT (decl)
848 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
849 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
850 && errorcount == 0 && sorrycount == 0)
851 {
852 timevar_push (TV_SYMOUT);
853 (*debug_hooks->global_decl) (decl);
854 timevar_pop (TV_SYMOUT);
855 }
856 return true;
857 }
858
859 return false;
860 }
861
862 /* Output all variables enqueued to be assembled. */
863 bool
864 cgraph_varpool_assemble_pending_decls (void)
865 {
866 bool changed = false;
867
868 if (errorcount || sorrycount)
869 return false;
870
871 /* EH might mark decls as needed during expansion. This should be safe since
872 we don't create references to new functions, but it should not be used
873 elsewhere. */
874 cgraph_varpool_analyze_pending_decls ();
875
876 while (cgraph_varpool_nodes_queue)
877 {
878 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
879
880 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
881 if (cgraph_varpool_assemble_decl (node))
882 changed = true;
883 node->next_needed = NULL;
884 }
885 return changed;
886 }
887
888 /* Output all asm statements we have stored up to be output. */
889
890 static void
891 cgraph_output_pending_asms (void)
892 {
893 struct cgraph_asm_node *can;
894
895 if (errorcount || sorrycount)
896 return;
897
898 for (can = cgraph_asm_nodes; can; can = can->next)
899 assemble_asm (can->asm_str);
900 cgraph_asm_nodes = NULL;
901 }
902
903 /* Analyze the function scheduled to be output. */
904 void
905 cgraph_analyze_function (struct cgraph_node *node)
906 {
907 tree decl = node->decl;
908
909 current_function_decl = decl;
910 push_cfun (DECL_STRUCT_FUNCTION (decl));
911 cgraph_lower_function (node);
912
913 /* First kill forward declaration so reverse inlining works properly. */
914 cgraph_create_edges (node, decl);
915
916 node->local.inlinable = tree_inlinable_function_p (decl);
917 node->local.self_insns = estimate_num_insns (decl);
918 if (node->local.inlinable)
919 node->local.disregard_inline_limits
920 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
921 initialize_inline_failed (node);
922 if (flag_really_no_inline && !node->local.disregard_inline_limits)
923 node->local.inlinable = 0;
924 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
925 node->global.insns = node->local.self_insns;
926
927 node->analyzed = true;
928 pop_cfun ();
929 current_function_decl = NULL;
930 }
931
932 /* Analyze the whole compilation unit once it is parsed completely. */
933
934 void
935 cgraph_finalize_compilation_unit (void)
936 {
937 struct cgraph_node *node;
938 /* Keep track of already processed nodes when called multiple times for
939 intermodule optimization. */
940 static struct cgraph_node *first_analyzed;
941
942 finish_aliases_1 ();
943
944 if (!flag_unit_at_a_time)
945 {
946 cgraph_output_pending_asms ();
947 cgraph_assemble_pending_functions ();
948 return;
949 }
950
951 if (!quiet_flag)
952 {
953 fprintf (stderr, "\nAnalyzing compilation unit");
954 fflush (stderr);
955 }
956
957 timevar_push (TV_CGRAPH);
958 cgraph_varpool_analyze_pending_decls ();
959 if (cgraph_dump_file)
960 {
961 fprintf (cgraph_dump_file, "Initial entry points:");
962 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
963 if (node->needed && DECL_SAVED_TREE (node->decl))
964 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
965 fprintf (cgraph_dump_file, "\n");
966 }
967
968 /* Propagate reachability flag and lower representation of all reachable
969 functions. In the future, lowering will introduce new functions and
970 new entry points on the way (by template instantiation and virtual
971 method table generation for instance). */
972 while (cgraph_nodes_queue)
973 {
974 struct cgraph_edge *edge;
975 tree decl = cgraph_nodes_queue->decl;
976
977 node = cgraph_nodes_queue;
978 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
979 node->next_needed = NULL;
980
981 /* ??? It is possible to create an extern inline function and later use the
982 weak alias attribute to kill its body. See
983 gcc.c-torture/compile/20011119-1.c */
984 if (!DECL_SAVED_TREE (decl))
985 {
986 cgraph_reset_node (node);
987 continue;
988 }
989
990 gcc_assert (!node->analyzed && node->reachable);
991 gcc_assert (DECL_SAVED_TREE (decl));
992
993 cgraph_analyze_function (node);
994
995 for (edge = node->callees; edge; edge = edge->next_callee)
996 if (!edge->callee->reachable)
997 cgraph_mark_reachable_node (edge->callee);
998
999 cgraph_varpool_analyze_pending_decls ();
1000 }
1001
1002 /* Collect entry points to the unit. */
1003
1004 if (cgraph_dump_file)
1005 {
1006 fprintf (cgraph_dump_file, "Unit entry points:");
1007 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1008 if (node->needed && DECL_SAVED_TREE (node->decl))
1009 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1010 fprintf (cgraph_dump_file, "\n\nInitial ");
1011 dump_cgraph (cgraph_dump_file);
1012 }
1013
1014 if (cgraph_dump_file)
1015 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1016
1017 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1018 {
1019 tree decl = node->decl;
1020
1021 if (node->local.finalized && !DECL_SAVED_TREE (decl))
1022 cgraph_reset_node (node);
1023
1024 if (!node->reachable && DECL_SAVED_TREE (decl))
1025 {
1026 if (cgraph_dump_file)
1027 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1028 cgraph_remove_node (node);
1029 continue;
1030 }
1031 else
1032 node->next_needed = NULL;
1033 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1034 gcc_assert (node->analyzed == node->local.finalized);
1035 }
1036 if (cgraph_dump_file)
1037 {
1038 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1039 dump_cgraph (cgraph_dump_file);
1040 }
1041 first_analyzed = cgraph_nodes;
1042 ggc_collect ();
1043 timevar_pop (TV_CGRAPH);
1044 }
1045 /* Figure out what functions we want to assemble. */
1046
1047 static void
1048 cgraph_mark_functions_to_output (void)
1049 {
1050 struct cgraph_node *node;
1051
1052 for (node = cgraph_nodes; node; node = node->next)
1053 {
1054 tree decl = node->decl;
1055 struct cgraph_edge *e;
1056
1057 gcc_assert (!node->output);
1058
1059 for (e = node->callers; e; e = e->next_caller)
1060 if (e->inline_failed)
1061 break;
1062
1063 /* We need to output all local functions that are used and not
1064 always inlined, as well as those that are reachable from
1065 outside the current compilation unit. */
1066 if (DECL_SAVED_TREE (decl)
1067 && !node->global.inlined_to
1068 && (node->needed
1069 || (e && node->reachable))
1070 && !TREE_ASM_WRITTEN (decl)
1071 && !DECL_EXTERNAL (decl))
1072 node->output = 1;
1073 else
1074 {
1075 /* We should've reclaimed all functions that are not needed. */
1076 #ifdef ENABLE_CHECKING
1077 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1078 && !DECL_EXTERNAL (decl))
1079 {
1080 dump_cgraph_node (stderr, node);
1081 internal_error ("failed to reclaim unneeded function");
1082 }
1083 #endif
1084 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1085 || DECL_EXTERNAL (decl));
1086
1087 }
1088
1089 }
1090 }
1091
1092 /* Expand function specified by NODE. */
1093
1094 static void
1095 cgraph_expand_function (struct cgraph_node *node)
1096 {
1097 tree decl = node->decl;
1098
1099 /* We ought to not compile any inline clones. */
1100 gcc_assert (!node->global.inlined_to);
1101
1102 if (flag_unit_at_a_time)
1103 announce_function (decl);
1104
1105 cgraph_lower_function (node);
1106
1107 /* Generate RTL for the body of DECL. */
1108 lang_hooks.callgraph.expand_function (decl);
1109
1110 /* Make sure that the back end didn't give up on compiling. */
1111 /* ??? This can happen with a nested function of an extern inline function. */
1112 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1113
1114 current_function_decl = NULL;
1115 if (!cgraph_preserve_function_body_p (node->decl))
1116 {
1117 DECL_SAVED_TREE (node->decl) = NULL;
1118 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1119 DECL_INITIAL (node->decl) = error_mark_node;
1120 /* Eliminate all call edges. This is important so the call_expr no longer
1121 points to the dead function body. */
1122 cgraph_node_remove_callees (node);
1123 }
1124
1125 cgraph_function_flags_ready = true;
1126 }
1127
1128 /* Return true when the call along edge E should be inlined; set *REASON to the failure reason otherwise. */
1129
1130 bool
1131 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1132 {
1133 *reason = e->inline_failed;
1134 return !e->inline_failed;
1135 }
1136
1137
1138
1139 /* Expand all functions that must be output.
1140
1141 Attempt to topologically sort the nodes so a function is output when
1142 all called functions are already assembled to allow data to be
1143 propagated across the callgraph. Use a stack to get smaller distance
1144 between a function and its callees (later we may choose to use a more
1145 sophisticated algorithm for function reordering; we will likely want
1146 to use subsections to make the output functions appear in top-down
1147 order). */
1148
1149 static void
1150 cgraph_expand_all_functions (void)
1151 {
1152 struct cgraph_node *node;
1153 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1154 int order_pos = 0, new_order_pos = 0;
1155 int i;
1156
1157 order_pos = cgraph_postorder (order);
1158 gcc_assert (order_pos == cgraph_n_nodes);
1159
1160 /* Garbage collector may remove inline clones we eliminate during
1161 optimization. So we must be sure to not reference them. */
1162 for (i = 0; i < order_pos; i++)
1163 if (order[i]->output)
1164 order[new_order_pos++] = order[i];
1165
1166 for (i = new_order_pos - 1; i >= 0; i--)
1167 {
1168 node = order[i];
1169 if (node->output)
1170 {
1171 gcc_assert (node->reachable);
1172 node->output = 0;
1173 cgraph_expand_function (node);
1174 }
1175 }
1176
1177 free (order);
1178
1179 /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
1180 the expansion process. Note that this queue may grow as it is
1181 being processed, as the new functions may generate new ones. */
1182 while (cgraph_expand_queue)
1183 {
1184 node = cgraph_expand_queue;
1185 cgraph_expand_queue = cgraph_expand_queue->next_needed;
1186 node->next_needed = NULL;
1187 node->output = 0;
1188 node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1189 cgraph_expand_function (node);
1190 }
1191 }
1192
1193 /* This is used to sort the node types by the cgraph order number. */
1194
1195 struct cgraph_order_sort
1196 {
1197 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1198 union
1199 {
1200 struct cgraph_node *f;
1201 struct cgraph_varpool_node *v;
1202 struct cgraph_asm_node *a;
1203 } u;
1204 };
1205
1206 /* Output all functions, variables, and asm statements in the order
1207 according to their order fields, which is the order in which they
1208 appeared in the file. This implements -fno-toplevel-reorder. In
1209 this mode we may output functions and variables which don't really
1210 need to be output. */
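/* A small illustration of the comment above (hypothetical user code, not
   part of GCC): when compiled with -fno-toplevel-reorder, a unit such as

     int counter = 0;
     asm ("# toplevel asm kept in place");
     void bump (void) { counter++; }

   has the variable, the toplevel asm and the function emitted in exactly
   this source order, and some of them may be output even when they are not
   strictly needed.  */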
1211
1212 static void
1213 cgraph_output_in_order (void)
1214 {
1215 int max;
1216 size_t size;
1217 struct cgraph_order_sort *nodes;
1218 int i;
1219 struct cgraph_node *pf;
1220 struct cgraph_varpool_node *pv;
1221 struct cgraph_asm_node *pa;
1222
1223 max = cgraph_order;
1224 size = max * sizeof (struct cgraph_order_sort);
1225 nodes = (struct cgraph_order_sort *) alloca (size);
1226 memset (nodes, 0, size);
1227
1228 cgraph_varpool_analyze_pending_decls ();
1229
1230 for (pf = cgraph_nodes; pf; pf = pf->next)
1231 {
1232 if (pf->output)
1233 {
1234 i = pf->order;
1235 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1236 nodes[i].kind = ORDER_FUNCTION;
1237 nodes[i].u.f = pf;
1238 }
1239 }
1240
1241 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1242 {
1243 i = pv->order;
1244 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1245 nodes[i].kind = ORDER_VAR;
1246 nodes[i].u.v = pv;
1247 }
1248
1249 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1250 {
1251 i = pa->order;
1252 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1253 nodes[i].kind = ORDER_ASM;
1254 nodes[i].u.a = pa;
1255 }
1256
1257 for (i = 0; i < max; ++i)
1258 {
1259 switch (nodes[i].kind)
1260 {
1261 case ORDER_FUNCTION:
1262 nodes[i].u.f->output = 0;
1263 cgraph_expand_function (nodes[i].u.f);
1264 break;
1265
1266 case ORDER_VAR:
1267 cgraph_varpool_assemble_decl (nodes[i].u.v);
1268 break;
1269
1270 case ORDER_ASM:
1271 assemble_asm (nodes[i].u.a->asm_str);
1272 break;
1273
1274 case ORDER_UNDEFINED:
1275 break;
1276
1277 default:
1278 gcc_unreachable ();
1279 }
1280 }
1281
1282 cgraph_asm_nodes = NULL;
1283 }
1284
1285 /* Mark visibility of all functions.
1286
1287 A local function is one whose calls can occur only in the current
1288 compilation unit and all its calls are explicit, so we can change
1289 its calling convention. We simply mark all static functions whose
1290 address is not taken as local.
1291
1292 We also clear the TREE_PUBLIC flag of declarations that are public from
1293 the language point of view but that, based on the computed visibility, we
1294 want the back end to treat as local. */
1295
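/* For illustration only (hypothetical user code, not part of GCC): in a
   unit such as

     static int helper (int x) { return x + 1; }
     int entry (int x) { return helper (x); }

   helper is static and its address is never taken, so the loop below sets
   node->local.local for it, while entry remains externally visible.  */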
1296 static void
1297 cgraph_function_and_variable_visibility (void)
1298 {
1299 struct cgraph_node *node;
1300 struct cgraph_varpool_node *vnode;
1301
1302 for (node = cgraph_nodes; node; node = node->next)
1303 {
1304 if (node->reachable
1305 && (DECL_COMDAT (node->decl)
1306 || (!flag_whole_program
1307 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1308 node->local.externally_visible = true;
1309 if (!node->local.externally_visible && node->analyzed
1310 && !DECL_EXTERNAL (node->decl))
1311 {
1312 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1313 TREE_PUBLIC (node->decl) = 0;
1314 }
1315 node->local.local = (!node->needed
1316 && node->analyzed
1317 && !DECL_EXTERNAL (node->decl)
1318 && !node->local.externally_visible);
1319 }
1320 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1321 {
1322 if (vnode->needed
1323 && !flag_whole_program
1324 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1325 vnode->externally_visible = 1;
1326 if (!vnode->externally_visible)
1327 {
1328 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1329 TREE_PUBLIC (vnode->decl) = 0;
1330 }
1331 gcc_assert (TREE_STATIC (vnode->decl));
1332 }
1333
1334 /* Because we have to be conservative on the boundaries of source
1335 level units, it is possible that we marked some functions as
1336 reachable just because they might be used later via external
1337 linkage, but after making them local they are really unreachable
1338 now. */
1339 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1340
1341 if (cgraph_dump_file)
1342 {
1343 fprintf (cgraph_dump_file, "\nMarking local functions:");
1344 for (node = cgraph_nodes; node; node = node->next)
1345 if (node->local.local)
1346 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1347 fprintf (cgraph_dump_file, "\n\n");
1348 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1349 for (node = cgraph_nodes; node; node = node->next)
1350 if (node->local.externally_visible)
1351 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1352 fprintf (cgraph_dump_file, "\n\n");
1353 }
1354 cgraph_function_flags_ready = true;
1355 }
1356
1357 /* Return true when function body of DECL still needs to be kept around
1358 for later re-use. */
1359 bool
1360 cgraph_preserve_function_body_p (tree decl)
1361 {
1362 struct cgraph_node *node;
1363 if (!cgraph_global_info_ready)
1364 return (DECL_INLINE (decl) && !flag_really_no_inline);
1365 /* Look if there is any clone around. */
1366 for (node = cgraph_node (decl); node; node = node->next_clone)
1367 if (node->global.inlined_to)
1368 return true;
1369 return false;
1370 }
1371
1372 static void
1373 ipa_passes (void)
1374 {
1375 cfun = NULL;
1376 tree_register_cfg_hooks ();
1377 bitmap_obstack_initialize (NULL);
1378 execute_ipa_pass_list (all_ipa_passes);
1379 bitmap_obstack_release (NULL);
1380 }
1381
1382 /* Perform simple optimizations based on callgraph. */
1383
1384 void
1385 cgraph_optimize (void)
1386 {
1387 #ifdef ENABLE_CHECKING
1388 verify_cgraph ();
1389 #endif
1390 if (!flag_unit_at_a_time)
1391 {
1392 cgraph_output_pending_asms ();
1393 cgraph_varpool_assemble_pending_decls ();
1394 return;
1395 }
1396
1397 process_pending_assemble_externals ();
1398
1399 /* The front end may output common variables after the unit has been finalized.
1400 It is safe to deal with them here as they are always zero initialized. */
1401 cgraph_varpool_analyze_pending_decls ();
1402
1403 timevar_push (TV_CGRAPHOPT);
1404 if (!quiet_flag)
1405 fprintf (stderr, "Performing intraprocedural optimizations\n");
1406
1407 cgraph_function_and_variable_visibility ();
1408 if (cgraph_dump_file)
1409 {
1410 fprintf (cgraph_dump_file, "Marked ");
1411 dump_cgraph (cgraph_dump_file);
1412 }
1413
1414 /* Don't run the IPA passes if there were any error or sorry messages. */
1415 if (errorcount == 0 && sorrycount == 0)
1416 ipa_passes ();
1417
1418 /* This pass removes bodies of extern inline functions we never inlined.
1419 Do this later so other IPA passes see what is really going on. */
1420 cgraph_remove_unreachable_nodes (false, dump_file);
1421 cgraph_global_info_ready = true;
1422 if (cgraph_dump_file)
1423 {
1424 fprintf (cgraph_dump_file, "Optimized ");
1425 dump_cgraph (cgraph_dump_file);
1426 dump_varpool (cgraph_dump_file);
1427 }
1428 timevar_pop (TV_CGRAPHOPT);
1429
1430 /* Output everything. */
1431 if (!quiet_flag)
1432 fprintf (stderr, "Assembling functions:\n");
1433 #ifdef ENABLE_CHECKING
1434 verify_cgraph ();
1435 #endif
1436
1437 cgraph_mark_functions_to_output ();
1438
1439 if (!flag_toplevel_reorder)
1440 cgraph_output_in_order ();
1441 else
1442 {
1443 cgraph_output_pending_asms ();
1444
1445 cgraph_expand_all_functions ();
1446 cgraph_varpool_remove_unreferenced_decls ();
1447
1448 cgraph_varpool_assemble_pending_decls ();
1449 }
1450
1451 if (cgraph_dump_file)
1452 {
1453 fprintf (cgraph_dump_file, "\nFinal ");
1454 dump_cgraph (cgraph_dump_file);
1455 }
1456 #ifdef ENABLE_CHECKING
1457 verify_cgraph ();
1458 /* Double check that all inline clones are gone and that all
1459 function bodies have been released from memory. */
1460 if (flag_unit_at_a_time
1461 && !dump_enabled_p (TDI_tree_all)
1462 && !(sorrycount || errorcount))
1463 {
1464 struct cgraph_node *node;
1465 bool error_found = false;
1466
1467 for (node = cgraph_nodes; node; node = node->next)
1468 if (node->analyzed
1469 && (node->global.inlined_to
1470 || DECL_SAVED_TREE (node->decl)))
1471 {
1472 error_found = true;
1473 dump_cgraph_node (stderr, node);
1474 }
1475 if (error_found)
1476 internal_error ("nodes with no released memory found");
1477 }
1478 #endif
1479 }
1480
1481 /* Generate and emit a static constructor or destructor. WHICH must be
1482 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1483 GENERIC statements. */
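/* An illustrative usage sketch (hypothetical caller, not taken from GCC):
   emit a constructor that calls an existing FUNCTION_DECL FN taking no
   arguments, at the default priority:

     tree body = alloc_stmt_list ();
     append_to_statement_list (build_function_call_expr (fn, NULL_TREE),
                               &body);
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);  */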
1484
1485 void
1486 cgraph_build_static_cdtor (char which, tree body, int priority)
1487 {
1488 static int counter = 0;
1489 char which_buf[16];
1490 tree decl, name, resdecl;
1491
1492 sprintf (which_buf, "%c_%d", which, counter++);
1493 name = get_file_function_name_long (which_buf);
1494
1495 decl = build_decl (FUNCTION_DECL, name,
1496 build_function_type (void_type_node, void_list_node));
1497 current_function_decl = decl;
1498
1499 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1500 DECL_ARTIFICIAL (resdecl) = 1;
1501 DECL_IGNORED_P (resdecl) = 1;
1502 DECL_RESULT (decl) = resdecl;
1503
1504 allocate_struct_function (decl);
1505
1506 TREE_STATIC (decl) = 1;
1507 TREE_USED (decl) = 1;
1508 DECL_ARTIFICIAL (decl) = 1;
1509 DECL_IGNORED_P (decl) = 1;
1510 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1511 DECL_SAVED_TREE (decl) = body;
1512 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1513 DECL_UNINLINABLE (decl) = 1;
1514
1515 DECL_INITIAL (decl) = make_node (BLOCK);
1516 TREE_USED (DECL_INITIAL (decl)) = 1;
1517
1518 DECL_SOURCE_LOCATION (decl) = input_location;
1519 cfun->function_end_locus = input_location;
1520
1521 switch (which)
1522 {
1523 case 'I':
1524 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1525 break;
1526 case 'D':
1527 DECL_STATIC_DESTRUCTOR (decl) = 1;
1528 break;
1529 default:
1530 gcc_unreachable ();
1531 }
1532
1533 gimplify_function_tree (decl);
1534
1535 /* ??? We will get called LATE in the compilation process. */
1536 if (cgraph_global_info_ready)
1537 {
1538 tree_lowering_passes (decl);
1539 tree_rest_of_compilation (decl);
1540 }
1541 else
1542 cgraph_finalize_function (decl, 0);
1543
1544 if (targetm.have_ctors_dtors)
1545 {
1546 void (*fn) (rtx, int);
1547
1548 if (which == 'I')
1549 fn = targetm.asm_out.constructor;
1550 else
1551 fn = targetm.asm_out.destructor;
1552 fn (XEXP (DECL_RTL (decl), 0), priority);
1553 }
1554 }
1555
1556 void
1557 init_cgraph (void)
1558 {
1559 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1560 }
1561
1562 /* The edges representing the callers of the NEW_VERSION node were
1563 fixed by cgraph_function_versioning (); now the call_expr in their
1564 respective tree code should be updated to call the NEW_VERSION. */
1565
1566 static void
1567 update_call_expr (struct cgraph_node *new_version)
1568 {
1569 struct cgraph_edge *e;
1570
1571 gcc_assert (new_version);
1572 for (e = new_version->callers; e; e = e->next_caller)
1573 /* Update the call expr on the edges
1574 to call the new version. */
1575 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1576 }
1577
1578
1579 /* Create a new cgraph node which is the new version of
1580 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1581 edges which should be redirected to point to
1582 NEW_VERSION. ALL the callee edges of OLD_VERSION
1583 are cloned to the new version node. Return the new
1584 version node. */
1585
1586 static struct cgraph_node *
1587 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1588 tree new_decl,
1589 VEC(cgraph_edge_p,heap) *redirect_callers)
1590 {
1591 struct cgraph_node *new_version;
1592 struct cgraph_edge *e, *new_e;
1593 struct cgraph_edge *next_callee;
1594 unsigned i;
1595
1596 gcc_assert (old_version);
1597
1598 new_version = cgraph_node (new_decl);
1599
1600 new_version->analyzed = true;
1601 new_version->local = old_version->local;
1602 new_version->global = old_version->global;
1603 new_version->rtl = old_version->rtl;
1604 new_version->reachable = true;
1605 new_version->count = old_version->count;
1606
1607 /* Clone the old node callees. Recursive calls are
1608 also cloned. */
1609 for (e = old_version->callees;e; e=e->next_callee)
1610 {
1611 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1612 new_e->count = e->count;
1613 }
1614 /* Fix recursive calls.
1615 If OLD_VERSION has a recursive call after the
1616 previous edge cloning, the new version will have an edge
1617 pointing to the old version, which is wrong;
1618 redirect it to point to the new version. */
1619 for (e = new_version->callees ; e; e = next_callee)
1620 {
1621 next_callee = e->next_callee;
1622 if (e->callee == old_version)
1623 cgraph_redirect_edge_callee (e, new_version);
1624
1625 if (!next_callee)
1626 break;
1627 }
1628 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1629 {
1630 /* Redirect calls to the old version node to point to its new
1631 version. */
1632 cgraph_redirect_edge_callee (e, new_version);
1633 }
1634
1635 return new_version;
1636 }
1637
1638 /* Perform function versioning.
1639 Function versioning includes copying of the tree and
1640 a callgraph update (creating a new cgraph node and updating
1641 its callees and callers).
1642
1643 REDIRECT_CALLERS varray includes the edges to be redirected
1644 to the new version.
1645
1646 TREE_MAP is a mapping of tree nodes we want to replace with
1647 new ones (according to results of prior analysis).
1648 OLD_VERSION_NODE is the node that is versioned.
1649 It returns the new version's cgraph node. */
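/* An illustrative usage sketch (hypothetical IPA pass code, not taken from
   GCC): create a version of NODE with no caller edges redirected and no tree
   replacements requested:

     VEC(cgraph_edge_p,heap) *redirect = VEC_alloc (cgraph_edge_p, heap, 0);
     struct cgraph_node *copy
       = cgraph_function_versioning (node, redirect, NULL);
     VEC_free (cgraph_edge_p, heap, redirect);  */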
1650
1651 struct cgraph_node *
1652 cgraph_function_versioning (struct cgraph_node *old_version_node,
1653 VEC(cgraph_edge_p,heap) *redirect_callers,
1654 varray_type tree_map)
1655 {
1656 tree old_decl = old_version_node->decl;
1657 struct cgraph_node *new_version_node = NULL;
1658 tree new_decl;
1659
1660 if (!tree_versionable_function_p (old_decl))
1661 return NULL;
1662
1663 /* Make a new FUNCTION_DECL tree node for the
1664 new version. */
1665 new_decl = copy_node (old_decl);
1666
1667 /* Create the new version's call-graph node.
1668 and update the edges of the new node. */
1669 new_version_node =
1670 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1671 redirect_callers);
1672
1673 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1674 tree_function_versioning (old_decl, new_decl, tree_map, false);
1675 /* Update the call_expr on the edges to call the new version node. */
1676 update_call_expr (new_version_node);
1677
1678 /* Update the new version's properties.
1679 Make the new version visible only within this translation unit.
1680 ??? We cannot use COMDAT linkage because there is no
1681 ABI support for this. */
1682 DECL_EXTERNAL (new_version_node->decl) = 0;
1683 DECL_ONE_ONLY (new_version_node->decl) = 0;
1684 TREE_PUBLIC (new_version_node->decl) = 0;
1685 DECL_COMDAT (new_version_node->decl) = 0;
1686 new_version_node->local.externally_visible = 0;
1687 new_version_node->local.local = 1;
1688 new_version_node->lowered = true;
1689 return new_version_node;
1690 }
1691
1692 /* Produce a separate function body for inline clones so the offline copy can be
1693 modified without affecting them. */
1694 struct cgraph_node *
1695 save_inline_function_body (struct cgraph_node *node)
1696 {
1697 struct cgraph_node *first_clone;
1698
1699 gcc_assert (node == cgraph_node (node->decl));
1700
1701 cgraph_lower_function (node);
1702
1703 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1704 output to the assembly file. This clone is pointed to by inline_decl of the
1705 original function and the inlining infrastructure knows how to deal with it. */
1706 if (!flag_unit_at_a_time)
1707 {
1708 struct cgraph_edge *e;
1709
1710 first_clone = cgraph_clone_node (node, node->count, 0, false);
1711 first_clone->needed = 0;
1712 first_clone->reachable = 1;
1713 /* Recursively clone all bodies. */
1714 for (e = first_clone->callees; e; e = e->next_callee)
1715 if (!e->inline_failed)
1716 cgraph_clone_inlined_nodes (e, true, false);
1717 }
1718 else
1719 first_clone = node->next_clone;
1720
1721 first_clone->decl = copy_node (node->decl);
1722 node->next_clone = NULL;
1723 if (!flag_unit_at_a_time)
1724 node->inline_decl = first_clone->decl;
1725 first_clone->prev_clone = NULL;
1726 cgraph_insert_node_to_hashtable (first_clone);
1727 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1728
1729 /* Copy the function body of NODE to the new declaration used by FIRST_CLONE. */
1730 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1731
1732 DECL_EXTERNAL (first_clone->decl) = 0;
1733 DECL_ONE_ONLY (first_clone->decl) = 0;
1734 TREE_PUBLIC (first_clone->decl) = 0;
1735 DECL_COMDAT (first_clone->decl) = 0;
1736
1737 for (node = first_clone->next_clone; node; node = node->next_clone)
1738 node->decl = first_clone->decl;
1739 #ifdef ENABLE_CHECKING
1740 verify_cgraph_node (first_clone);
1741 #endif
1742 return first_clone;
1743 }
1744