re PR middle-end/25962 (Pointer (null) check after the use in cgraph.c)
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based intraprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic intraprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline function.)
36
37 - cgraph_varpool_finalize_variable
38
39 This function has same behavior as the above but is used for static
40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once compilation unit is finalized and it will
45 no longer change.
46
47 In the unit-at-a-time the call-graph construction and local function
48 analysis takes place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be compilation
52 unit as defined by the language - for instance C frontend allows multiple
53 compilation units to be parsed at once and it should call function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In this unit-at-a-time compilation the intra procedural analysis takes
59 place here. In particular the static functions whose address is never
60 taken are marked as local. Backend can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at given stage
68 of compilation
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When function or variable is referenced by some hidden way (for instance
74 via assembly code and marked by attribute "used"), the call-graph data structure
75 must be updated accordingly by this function.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
81 callgraph and varpool nodes referenced by them as needed.
82
83 ??? On the tree-ssa genericizing should take place here and we will avoid
84 need for these hooks (replacing them by genericizing hook)
85
86 - expand_function callback
87
88 This function is used to expand function and pass it into RTL back-end.
89 Front-end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92 cgraph_varpool_finalize_function, cgraph_optimize can cause arbitrarily
93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode analyzing of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too
104 and variables found to be needed output to the assembly file. Via
105 mark_referenced call in assemble_variable functions referenced by
106 static variables are noticed too.
107
108 The intra-procedural information is produced and its existence
109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change function from !reachable to reachable and thus
111 assemble_variable no longer call mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that has not been completely inlined or are not external are output.
115
116 ??? It is possible that reference to function or variable is optimized
117 out. We can not deal with this nicely because topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize code so variables are output very last and only if they
122 really has been referenced by produced code, so we catch more cases
123 where reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used but
129 it is still needed for some legacy code that rely on particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using call of
135 cgraph_assemble_pending_function from cgraph_finalize_function. The
136 decision on whether function is needed is made more conservative so
137 uninlinable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and it
139 is completely relied upon assemble_variable to mark them. */
140
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_output_pending_asms (void);
175
176 /* Records tree nodes seen in record_reference. Simply using
177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
179 record_reference itself. */
180 static struct pointer_set_t *visited_nodes;
181
182 static FILE *cgraph_dump_file;
183
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  NODE is the callgraph node for DECL.  As a side effect,
   a public "main" is marked externally visible.  The tests are ordered
   from cheap/definite to conservative and must stay in this order.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  tree origin;

  /* A public "main" is the program entry point; always needed.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible
      || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.
     (With -fwhole-program, visibility alone does not make a function
     needed.)  */
  if ((TREE_PUBLIC (decl) && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  /* In unit-at-a-time mode nothing else makes the function needed; the
     remaining, more conservative tests apply only to the immediate
     emission mode.  */
  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if its marked for inlining.  Otherwise we want to emit it now.  */

  /* "extern inline" functions are never output locally.  */
  if (DECL_EXTERNAL (decl))
    return false;
  /* Nested functions of extern inline function shall not be emit unless
     we inlined the origin.  */
  for (origin = decl_function_context (decl); origin;
       origin = decl_function_context (origin))
    if (DECL_EXTERNAL (origin))
      return false;
  /* We want to emit COMDAT functions only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  if (!DECL_INLINE (decl)
      || (!node->local.disregard_inline_limits
          /* When declared inline, defer even the uninlinable functions.
             This allows them to be eliminated when unused.  */
          && !DECL_DECLARED_INLINE_P (decl)
          && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
    return true;

  return false;
}
257
258 /* Walk the decls we marked as necessary and see if they reference new
259 variables or functions and add them into the worklists. */
260 static bool
261 cgraph_varpool_analyze_pending_decls (void)
262 {
263 bool changed = false;
264 timevar_push (TV_CGRAPH);
265
266 while (cgraph_varpool_first_unanalyzed_node)
267 {
268 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
269
270 cgraph_varpool_first_unanalyzed_node->analyzed = true;
271
272 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
273
274 if (DECL_INITIAL (decl))
275 {
276 visited_nodes = pointer_set_create ();
277 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
278 pointer_set_destroy (visited_nodes);
279 visited_nodes = NULL;
280 }
281 changed = true;
282 }
283 timevar_pop (TV_CGRAPH);
284 return changed;
285 }
286
287 /* Optimization of function bodies might've rendered some variables as
288 unnecessary so we want to avoid these from being compiled.
289
290 This is done by pruning the queue and keeping only the variables that
291 really appear needed (ie they are either externally visible or referenced
292 by compiled function). Re-doing the reachability analysis on variables
293 brings back the remaining variables referenced by these. */
294 static void
295 cgraph_varpool_remove_unreferenced_decls (void)
296 {
297 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
298
299 cgraph_varpool_reset_queue ();
300
301 if (errorcount || sorrycount)
302 return;
303
304 while (node)
305 {
306 tree decl = node->decl;
307 next = node->next_needed;
308 node->needed = 0;
309
310 if (node->finalized
311 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
312 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
313 || node->force_output
314 || decide_is_variable_needed (node, decl)
315 /* ??? Cgraph does not yet rule the world with an iron hand,
316 and does not control the emission of debug information.
317 After a variable has its DECL_RTL set, we must assume that
318 it may be referenced by the debug information, and we can
319 no longer elide it. */
320 || DECL_RTL_SET_P (decl)))
321 cgraph_varpool_mark_needed_node (node);
322
323 node = next;
324 }
325 /* Make sure we mark alias targets as used targets. */
326 finish_aliases_1 ();
327 cgraph_varpool_analyze_pending_decls ();
328 }
329
330
/* When not doing unit-at-a-time, output all functions enqueued.
   Return true when such functions were found.  A no-op (returning
   false) in unit-at-a-time mode, where expansion is deferred to
   cgraph_optimize.  */

bool
cgraph_assemble_pending_functions (void)
{
  bool output = false;

  if (flag_unit_at_a_time)
    return false;

  cgraph_output_pending_asms ();

  /* Expand every queued function that has a body of its own; aliases,
     functions already inlined elsewhere and external declarations
     produce no assembly here.  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_node *n = cgraph_nodes_queue;

      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      n->next_needed = NULL;
      if (!n->global.inlined_to
          && !n->alias
          && !DECL_EXTERNAL (n->decl))
        {
          cgraph_expand_function (n);
          output = true;
        }
    }

  /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
     the expansion process.  Note that this queue may grow as its
     being processed, as the new functions may generate new ones.  */
  while (cgraph_expand_queue)
    {
      struct cgraph_node *n = cgraph_expand_queue;
      cgraph_expand_queue = cgraph_expand_queue->next_needed;
      n->next_needed = NULL;
      cgraph_finalize_function (n->decl, false);
      output = true;
    }

  return output;
}
373
374
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.

   Resets NODE's analysis data so the (re)defined function can be
   analyzed again from scratch.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->output is set, then this is a unit-at-a-time compilation
     and we have already begun whole-unit analysis.  This is *not*
     testing for whether we've already emitted the function.  That
     case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should
     never present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->output);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  /* In immediate-emission mode, drop the clones representing functions
     inlined into NODE; they belong to the discarded old body.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      for (n = cgraph_nodes; n; n = n->next)
        if (n->global.inlined_to == node)
          cgraph_remove_node (n);
    }

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already proceeded it and ignored as not needed.  */
  if (node->reachable && !flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      /* Clear REACHABLE only when NODE is no longer on the assembly
         queue, so that cgraph_finalize_function will mark it reachable
         (and queue it) again.  */
      for (n = cgraph_nodes_queue; n; n = n->next_needed)
        if (n == node)
          break;
      if (!n)
        node->reachable = 0;
    }
}
428
429 static void
430 cgraph_lower_function (struct cgraph_node *node)
431 {
432 if (node->lowered)
433 return;
434 tree_lowering_passes (node->decl);
435 node->lowered = true;
436 }
437
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A second finalization of the same decl is a redefinition (the GCC
     extern inline extension); wipe the previous analysis first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  notice_global_symbol (decl);
  node->decl = decl;
  node->local.finalized = true;
  /* Front ends that already built a CFG hand us lowered bodies.
     NOTE(review): assumes DECL_STRUCT_FUNCTION (decl) is non-NULL at
     this point -- confirm all front ends guarantee that.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_analyze_function (node);
      cgraph_decide_inlining_incrementally (node, false);
    }

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If not unit at a time, go ahead and emit everything we've found
     to be reachable at this time.  Collect garbage only when nothing
     was emitted and our caller allows it (NESTED false).  */
  if (!nested)
    {
      if (!cgraph_assemble_pending_functions ())
        ggc_collect ();
    }

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);
}
492
/* Walk tree and record all calls.  Called via walk_tree.  TP points at
   the visited node, *WALK_SUBTREES may be cleared to prune the walk,
   and DATA is passed through to the language hook.  What is actually
   recorded here are references to static/external variables and taken
   function addresses; plain calls are handled by the CFG walk in
   cgraph_create_edges.  */
static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* ??? Really, we should mark this decl as *potentially* referenced
         by this function and re-examine whether the decl is actually used
         after rtl has been generated.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
        {
          cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
          /* Let the front end mark further nodes referenced through
             this decl (see the analyze_expr contract in the file
             header comment).  */
          if (lang_hooks.callgraph.analyze_expr)
            return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
                                                      data);
        }
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      if (flag_unit_at_a_time)
        {
          /* Record dereferences to the functions.  This makes the
             functions reachable unconditionally.  */
          tree decl = TREE_OPERAND (*tp, 0);
          if (TREE_CODE (decl) == FUNCTION_DECL)
            cgraph_mark_needed_node (cgraph_node (decl));
        }
      break;

    default:
      /* Save some cycles by not walking types and declaration as we
         won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
        {
          *walk_subtrees = 0;
          break;
        }

      /* Front-end-specific tree codes are delegated to the language
         hook.  */
      if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
        return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
      break;
    }

  return NULL;
}
542
/* Create cgraph edges for function calls inside BODY from NODE.
   Walks every statement of BODY's CFG: call statements become call
   edges annotated with the block's profile count and loop depth, and
   all other operands are scanned by record_reference for variable and
   function-address references.  Local statics of BODY are finalized
   (or their initializers scanned) as well.  */

static void
cgraph_create_edges (struct cgraph_node *node, tree body)
{
  basic_block bb;

  struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
  block_stmt_iterator bsi;
  tree step;

  /* VISITED_NODES is shared by all walk_tree calls below, so each tree
     node is scanned only once even if reachable along several paths.  */
  visited_nodes = pointer_set_create ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  FOR_EACH_BB_FN (bb, this_cfun)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        tree stmt = bsi_stmt (bsi);
        tree call = get_call_expr_in (stmt);
        tree decl;

        if (call && (decl = get_callee_fndecl (call)))
          {
            cgraph_create_edge (node, cgraph_node (decl), stmt,
                                bb->count,
                                bb->loop_depth);
            /* Scan the call's argument list...  */
            walk_tree (&TREE_OPERAND (call, 1),
                       record_reference, node, visited_nodes);
            /* ...and, for an assignment, its left-hand side.  */
            if (TREE_CODE (stmt) == MODIFY_EXPR)
              walk_tree (&TREE_OPERAND (stmt, 0),
                         record_reference, node, visited_nodes);
          }
        else
          walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
      }

  /* Look for initializers of constant variables and private statics.  */
  for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
       step;
       step = TREE_CHAIN (step))
    {
      tree decl = TREE_VALUE (step);
      if (TREE_CODE (decl) == VAR_DECL
          && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
          && flag_unit_at_a_time)
        cgraph_varpool_finalize_decl (decl);
      else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
        walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
    }

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
596
597 /* Give initial reasons why inlining would fail. Those gets
598 either NULLified or usually overwritten by more precise reason
599 later. */
600 static void
601 initialize_inline_failed (struct cgraph_node *node)
602 {
603 struct cgraph_edge *e;
604
605 for (e = node->callers; e; e = e->next_caller)
606 {
607 gcc_assert (!e->callee->global.inlined_to);
608 gcc_assert (e->inline_failed);
609 if (node->local.redefined_extern_inline)
610 e->inline_failed = N_("redefined extern inline functions are not "
611 "considered for inlining");
612 else if (!node->local.inlinable)
613 e->inline_failed = N_("function not inlinable");
614 else
615 e->inline_failed = N_("function not considered for inlining");
616 }
617 }
618
/* Rebuild call edges from current function after passes not aware
   of cgraph updating.  Returns 0 (no extra TODO flags).  */
static unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  block_stmt_iterator bsi;

  /* Throw the stale edges away first.  */
  cgraph_node_remove_callees (node);

  node->count = ENTRY_BLOCK_PTR->count;

  /* Re-create one edge per call statement found in the CFG.  */
  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
        tree stmt = bsi_stmt (bsi);
        tree call = get_call_expr_in (stmt);
        tree decl;

        if (call && (decl = get_callee_fndecl (call)))
          cgraph_create_edge (node, cgraph_node (decl), stmt,
                              bb->count,
                              bb->loop_depth);
      }
  initialize_inline_failed (node);
  gcc_assert (!node->global.inlined_to);
  return 0;
}
648
/* Pass wrapper around rebuild_cgraph_edges.  Anonymous (no dump file);
   requires only that a CFG is present.  */
struct tree_opt_pass pass_rebuild_cgraph_edges =
{
  NULL,					/* name */
  NULL,					/* gate */
  rebuild_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
665
666 /* Verify cgraph nodes of given cgraph node. */
667 void
668 verify_cgraph_node (struct cgraph_node *node)
669 {
670 struct cgraph_edge *e;
671 struct cgraph_node *main_clone;
672 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
673 basic_block this_block;
674 block_stmt_iterator bsi;
675 bool error_found = false;
676
677 timevar_push (TV_CGRAPH_VERIFY);
678 for (e = node->callees; e; e = e->next_callee)
679 if (e->aux)
680 {
681 error ("aux field set for edge %s->%s",
682 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
683 error_found = true;
684 }
685 if (node->count < 0)
686 {
687 error ("Execution count is negative");
688 error_found = true;
689 }
690 for (e = node->callers; e; e = e->next_caller)
691 {
692 if (e->count < 0)
693 {
694 error ("caller edge count is negative");
695 error_found = true;
696 }
697 if (!e->inline_failed)
698 {
699 if (node->global.inlined_to
700 != (e->caller->global.inlined_to
701 ? e->caller->global.inlined_to : e->caller))
702 {
703 error ("inlined_to pointer is wrong");
704 error_found = true;
705 }
706 if (node->callers->next_caller)
707 {
708 error ("multiple inline callers");
709 error_found = true;
710 }
711 }
712 else
713 if (node->global.inlined_to)
714 {
715 error ("inlined_to pointer set for noninline callers");
716 error_found = true;
717 }
718 }
719 if (!node->callers && node->global.inlined_to)
720 {
721 error ("inlined_to pointer is set but no predecesors found");
722 error_found = true;
723 }
724 if (node->global.inlined_to == node)
725 {
726 error ("inlined_to pointer refers to itself");
727 error_found = true;
728 }
729
730 for (main_clone = cgraph_node (node->decl); main_clone;
731 main_clone = main_clone->next_clone)
732 if (main_clone == node)
733 break;
734 if (!cgraph_node (node->decl))
735 {
736 error ("node not found in cgraph_hash");
737 error_found = true;
738 }
739
740 if (node->analyzed
741 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
742 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
743 {
744 if (this_cfun->cfg)
745 {
746 /* The nodes we're interested in are never shared, so walk
747 the tree ignoring duplicates. */
748 visited_nodes = pointer_set_create ();
749 /* Reach the trees by walking over the CFG, and note the
750 enclosing basic-blocks in the call edges. */
751 FOR_EACH_BB_FN (this_block, this_cfun)
752 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
753 {
754 tree stmt = bsi_stmt (bsi);
755 tree call = get_call_expr_in (stmt);
756 tree decl;
757 if (call && (decl = get_callee_fndecl (call)))
758 {
759 struct cgraph_edge *e = cgraph_edge (node, stmt);
760 if (e)
761 {
762 if (e->aux)
763 {
764 error ("shared call_stmt:");
765 debug_generic_stmt (stmt);
766 error_found = true;
767 }
768 if (e->callee->decl != cgraph_node (decl)->decl
769 && e->inline_failed)
770 {
771 error ("edge points to wrong declaration:");
772 debug_tree (e->callee->decl);
773 fprintf (stderr," Instead of:");
774 debug_tree (decl);
775 }
776 e->aux = (void *)1;
777 }
778 else
779 {
780 error ("missing callgraph edge for call stmt:");
781 debug_generic_stmt (stmt);
782 error_found = true;
783 }
784 }
785 }
786 pointer_set_destroy (visited_nodes);
787 visited_nodes = NULL;
788 }
789 else
790 /* No CFG available?! */
791 gcc_unreachable ();
792
793 for (e = node->callees; e; e = e->next_callee)
794 {
795 if (!e->aux)
796 {
797 error ("edge %s->%s has no corresponding call_stmt",
798 cgraph_node_name (e->caller),
799 cgraph_node_name (e->callee));
800 debug_generic_stmt (e->call_stmt);
801 error_found = true;
802 }
803 e->aux = 0;
804 }
805 }
806 if (error_found)
807 {
808 dump_cgraph_node (stderr, node);
809 internal_error ("verify_cgraph_node failed");
810 }
811 timevar_pop (TV_CGRAPH_VERIFY);
812 }
813
814 /* Verify whole cgraph structure. */
815 void
816 verify_cgraph (void)
817 {
818 struct cgraph_node *node;
819
820 if (sorrycount || errorcount)
821 return;
822
823 for (node = cgraph_nodes; node; node = node->next)
824 verify_cgraph_node (node);
825 }
826
827 /* Output one variable, if necessary. Return whether we output it. */
828 static bool
829 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
830 {
831 tree decl = node->decl;
832
833 if (!TREE_ASM_WRITTEN (decl)
834 && !node->alias
835 && !DECL_EXTERNAL (decl)
836 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
837 {
838 assemble_variable (decl, 0, 1, 0);
839 /* Local static variables are never seen by check_global_declarations
840 so we need to output debug info by hand. */
841 if (DECL_CONTEXT (decl)
842 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
843 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
844 && errorcount == 0 && sorrycount == 0)
845 {
846 timevar_push (TV_SYMOUT);
847 (*debug_hooks->global_decl) (decl);
848 timevar_pop (TV_SYMOUT);
849 }
850 return true;
851 }
852
853 return false;
854 }
855
856 /* Output all variables enqueued to be assembled. */
857 bool
858 cgraph_varpool_assemble_pending_decls (void)
859 {
860 bool changed = false;
861
862 if (errorcount || sorrycount)
863 return false;
864
865 /* EH might mark decls as needed during expansion. This should be safe since
866 we don't create references to new function, but it should not be used
867 elsewhere. */
868 cgraph_varpool_analyze_pending_decls ();
869
870 while (cgraph_varpool_nodes_queue)
871 {
872 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
873
874 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
875 if (cgraph_varpool_assemble_decl (node))
876 changed = true;
877 node->next_needed = NULL;
878 }
879 return changed;
880 }
881
882 /* Output all asm statements we have stored up to be output. */
883
884 static void
885 cgraph_output_pending_asms (void)
886 {
887 struct cgraph_asm_node *can;
888
889 if (errorcount || sorrycount)
890 return;
891
892 for (can = cgraph_asm_nodes; can; can = can->next)
893 assemble_asm (can->asm_str);
894 cgraph_asm_nodes = NULL;
895 }
896
/* Analyze the function scheduled to be output.  Lowers NODE's body,
   builds its outgoing call edges and computes the local inline data
   (inlinability and size estimate).  Sets NODE->analyzed on exit.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  cgraph_lower_function (node);

  /* First kill forward declaration so reverse inlining works properly.  */
  cgraph_create_edges (node, decl);

  node->local.inlinable = tree_inlinable_function_p (decl);
  node->local.self_insns = estimate_num_insns (decl);
  if (node->local.inlinable)
    node->local.disregard_inline_limits
      = lang_hooks.tree_inlining.disregard_inline_limits (decl);
  initialize_inline_failed (node);
  /* With -fno-inline (flag_really_no_inline) only mandatory inlines
     stay inlinable.  */
  if (flag_really_no_inline && !node->local.disregard_inline_limits)
    node->local.inlinable = 0;
  /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
  node->global.insns = node->local.self_insns;

  node->analyzed = true;
  pop_cfun ();
  current_function_decl = NULL;
}
925
/* Analyze the whole compilation unit once it is parsed completely.
   In unit-at-a-time mode this runs the reachability propagation:
   starting from the entry points queued so far, functions are analyzed
   and their callees queued in turn; afterwards the bodies of functions
   found unreachable are reclaimed.  In non-unit-at-a-time mode
   everything pending is simply assembled.  */

void
cgraph_finalize_compilation_unit (void)
{
  struct cgraph_node *node;
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;

  finish_aliases_1 ();

  if (!flag_unit_at_a_time)
    {
      cgraph_output_pending_asms ();
      cgraph_assemble_pending_functions ();
      return;
    }

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_SAVED_TREE (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (DECL_SAVED_TREE (decl));

      cgraph_analyze_function (node);

      /* Queue newly discovered callees; this drives the worklist.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* Analysis may have referenced new variables.  */
      cgraph_varpool_analyze_pending_decls ();
    }

  /* Collect entry points to the unit.  */

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release bodies of functions the propagation never reached; they
     will not be output.  */
  for (node = cgraph_nodes; node != first_analyzed; node = node->next)
    {
      tree decl = node->decl;

      if (node->local.finalized && !DECL_SAVED_TREE (decl))
	cgraph_reset_node (node);

      if (!node->reachable && DECL_SAVED_TREE (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
  timevar_pop (TV_CGRAPH);
}
1039 /* Figure out what functions we want to assemble. */
1040
1041 static void
1042 cgraph_mark_functions_to_output (void)
1043 {
1044 struct cgraph_node *node;
1045
1046 for (node = cgraph_nodes; node; node = node->next)
1047 {
1048 tree decl = node->decl;
1049 struct cgraph_edge *e;
1050
1051 gcc_assert (!node->output);
1052
1053 for (e = node->callers; e; e = e->next_caller)
1054 if (e->inline_failed)
1055 break;
1056
1057 /* We need to output all local functions that are used and not
1058 always inlined, as well as those that are reachable from
1059 outside the current compilation unit. */
1060 if (DECL_SAVED_TREE (decl)
1061 && !node->global.inlined_to
1062 && (node->needed
1063 || (e && node->reachable))
1064 && !TREE_ASM_WRITTEN (decl)
1065 && !DECL_EXTERNAL (decl))
1066 node->output = 1;
1067 else
1068 {
1069 /* We should've reclaimed all functions that are not needed. */
1070 #ifdef ENABLE_CHECKING
1071 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1072 && !DECL_EXTERNAL (decl))
1073 {
1074 dump_cgraph_node (stderr, node);
1075 internal_error ("failed to reclaim unneeded function");
1076 }
1077 #endif
1078 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1079 || DECL_EXTERNAL (decl));
1080
1081 }
1082
1083 }
1084 }
1085
/* Expand function specified by NODE: lower it to GIMPLE, hand it to the
   language hook that drives RTL generation and assembly output, and then
   release the function body unless cgraph_preserve_function_body_p says
   an inline clone still needs it.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  if (flag_unit_at_a_time)
    announce_function (decl);

  /* No-op when the body is already lowered.  */
  cgraph_lower_function (node);

  /* Generate RTL for the body of DECL.  */
  lang_hooks.callgraph.expand_function (decl);

  /* Make sure that BE didn't give up on compiling.  */
  /* ??? Can happen with nested function of extern inline.  */
  gcc_assert (TREE_ASM_WRITTEN (node->decl));

  current_function_decl = NULL;
  if (!cgraph_preserve_function_body_p (node->decl))
    {
      /* Release the body so its memory can be reclaimed by the GC.  */
      DECL_SAVED_TREE (node->decl) = NULL;
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
      DECL_INITIAL (node->decl) = error_mark_node;
      /* Eliminate all call edges.  This is important so the call_expr no longer
	 points to the dead function body.  */
      cgraph_node_remove_callees (node);
    }

  cgraph_function_flags_ready = true;
}
1121
1122 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1123
1124 bool
1125 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1126 {
1127 *reason = e->inline_failed;
1128 return !e->inline_failed;
1129 }
1130
1131
1132
1133 /* Expand all functions that must be output.
1134
1135 Attempt to topologically sort the nodes so function is output when
1136 all called functions are already assembled to allow data to be
1137 propagated across the callgraph. Use a stack to get smaller distance
1138 between a function and its callees (later we may choose to use a more
1139 sophisticated algorithm for function reordering; we will likely want
1140 to use subsections to make the output functions appear in top-down
1141 order). */
1142
1143 static void
1144 cgraph_expand_all_functions (void)
1145 {
1146 struct cgraph_node *node;
1147 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1148 int order_pos = 0, new_order_pos = 0;
1149 int i;
1150
1151 order_pos = cgraph_postorder (order);
1152 gcc_assert (order_pos == cgraph_n_nodes);
1153
1154 /* Garbage collector may remove inline clones we eliminate during
1155 optimization. So we must be sure to not reference them. */
1156 for (i = 0; i < order_pos; i++)
1157 if (order[i]->output)
1158 order[new_order_pos++] = order[i];
1159
1160 for (i = new_order_pos - 1; i >= 0; i--)
1161 {
1162 node = order[i];
1163 if (node->output)
1164 {
1165 gcc_assert (node->reachable);
1166 node->output = 0;
1167 cgraph_expand_function (node);
1168 }
1169 }
1170
1171 free (order);
1172
1173 /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
1174 the expansion process. Note that this queue may grow as its
1175 being processed, as the new functions may generate new ones. */
1176 while (cgraph_expand_queue)
1177 {
1178 node = cgraph_expand_queue;
1179 cgraph_expand_queue = cgraph_expand_queue->next_needed;
1180 node->next_needed = NULL;
1181 node->output = 0;
1182 node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1183 cgraph_expand_function (node);
1184 }
1185 }
1186
/* This is used to sort the node types by the cgraph order number.
   One instance describes the entity occupying one source-order slot
   when emitting under -fno-toplevel-reorder.  */

struct cgraph_order_sort
{
  /* Which entity kind fills this slot; ORDER_UNDEFINED (== 0, so that
     zeroed storage is valid) means the slot is empty.  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  /* The entity itself; the valid member is selected by KIND.  */
  union
  {
    struct cgraph_node *f;
    struct cgraph_varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
1199
1200 /* Output all functions, variables, and asm statements in the order
1201 according to their order fields, which is the order in which they
1202 appeared in the file. This implements -fno-toplevel-reorder. In
1203 this mode we may output functions and variables which don't really
1204 need to be output. */
1205
1206 static void
1207 cgraph_output_in_order (void)
1208 {
1209 int max;
1210 size_t size;
1211 struct cgraph_order_sort *nodes;
1212 int i;
1213 struct cgraph_node *pf;
1214 struct cgraph_varpool_node *pv;
1215 struct cgraph_asm_node *pa;
1216
1217 max = cgraph_order;
1218 size = max * sizeof (struct cgraph_order_sort);
1219 nodes = (struct cgraph_order_sort *) alloca (size);
1220 memset (nodes, 0, size);
1221
1222 cgraph_varpool_analyze_pending_decls ();
1223
1224 for (pf = cgraph_nodes; pf; pf = pf->next)
1225 {
1226 if (pf->output)
1227 {
1228 i = pf->order;
1229 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1230 nodes[i].kind = ORDER_FUNCTION;
1231 nodes[i].u.f = pf;
1232 }
1233 }
1234
1235 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1236 {
1237 i = pv->order;
1238 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1239 nodes[i].kind = ORDER_VAR;
1240 nodes[i].u.v = pv;
1241 }
1242
1243 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1244 {
1245 i = pa->order;
1246 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1247 nodes[i].kind = ORDER_ASM;
1248 nodes[i].u.a = pa;
1249 }
1250
1251 for (i = 0; i < max; ++i)
1252 {
1253 switch (nodes[i].kind)
1254 {
1255 case ORDER_FUNCTION:
1256 nodes[i].u.f->output = 0;
1257 cgraph_expand_function (nodes[i].u.f);
1258 break;
1259
1260 case ORDER_VAR:
1261 cgraph_varpool_assemble_decl (nodes[i].u.v);
1262 break;
1263
1264 case ORDER_ASM:
1265 assemble_asm (nodes[i].u.a->asm_str);
1266 break;
1267
1268 case ORDER_UNDEFINED:
1269 break;
1270
1271 default:
1272 gcc_unreachable ();
1273 }
1274 }
1275
1276 cgraph_asm_nodes = NULL;
1277 }
1278
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.  */

static void
cgraph_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct cgraph_varpool_node *vnode;

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* COMDAT functions, and public non-external functions when not
	 compiling the whole program, may be referenced from other
	 objects.  */
      if (node->reachable
	  && (DECL_COMDAT (node->decl)
	      || (!flag_whole_program
		  && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
	node->local.externally_visible = true;
      /* Analyzed functions that are not externally visible can be
	 demoted to static for the backend's benefit.  */
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
	  TREE_PUBLIC (node->decl) = 0;
	}
      /* Local: analyzed here, needed by nothing outside, and not
	 externally visible.  */
      node->local.local = (!node->needed
			   && node->analyzed
			   && !DECL_EXTERNAL (node->decl)
			   && !node->local.externally_visible);
    }
  /* Same computation for variables in the varpool queue.  */
  for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (vnode->needed
	  && !flag_whole_program
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
	vnode->externally_visible = 1;
      if (!vnode->externally_visible)
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
	  TREE_PUBLIC (vnode->decl) = 0;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }

  /* Because we have to be conservative on the boundaries of source
     level units, it is possible that we marked some functions in
     reachable just because they might be used later via external
     linkage, but after making them local they are really unreachable
     now.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
      fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
}
1350
1351 /* Return true when function body of DECL still needs to be kept around
1352 for later re-use. */
1353 bool
1354 cgraph_preserve_function_body_p (tree decl)
1355 {
1356 struct cgraph_node *node;
1357 if (!cgraph_global_info_ready)
1358 return (DECL_INLINE (decl) && !flag_really_no_inline);
1359 /* Look if there is any clone around. */
1360 for (node = cgraph_node (decl); node; node = node->next_clone)
1361 if (node->global.inlined_to)
1362 return true;
1363 return false;
1364 }
1365
/* Run the interprocedural (IPA) pass list over the whole callgraph.  */

static void
ipa_passes (void)
{
  /* IPA passes look at the unit as a whole rather than any single
     function body, so run with no current function.  */
  cfun = NULL;
  tree_register_cfg_hooks ();
  /* Give the passes a default bitmap obstack for their lifetime.  */
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1375
1376 /* Perform simple optimizations based on callgraph. */
1377
1378 void
1379 cgraph_optimize (void)
1380 {
1381 #ifdef ENABLE_CHECKING
1382 verify_cgraph ();
1383 #endif
1384 if (!flag_unit_at_a_time)
1385 {
1386 cgraph_output_pending_asms ();
1387 cgraph_varpool_assemble_pending_decls ();
1388 return;
1389 }
1390
1391 process_pending_assemble_externals ();
1392
1393 /* Frontend may output common variables after the unit has been finalized.
1394 It is safe to deal with them here as they are always zero initialized. */
1395 cgraph_varpool_analyze_pending_decls ();
1396
1397 timevar_push (TV_CGRAPHOPT);
1398 if (!quiet_flag)
1399 fprintf (stderr, "Performing intraprocedural optimizations\n");
1400
1401 cgraph_function_and_variable_visibility ();
1402 if (cgraph_dump_file)
1403 {
1404 fprintf (cgraph_dump_file, "Marked ");
1405 dump_cgraph (cgraph_dump_file);
1406 }
1407 ipa_passes ();
1408 /* This pass remove bodies of extern inline functions we never inlined.
1409 Do this later so other IPA passes see what is really going on. */
1410 cgraph_remove_unreachable_nodes (false, dump_file);
1411 cgraph_global_info_ready = true;
1412 if (cgraph_dump_file)
1413 {
1414 fprintf (cgraph_dump_file, "Optimized ");
1415 dump_cgraph (cgraph_dump_file);
1416 dump_varpool (cgraph_dump_file);
1417 }
1418 timevar_pop (TV_CGRAPHOPT);
1419
1420 /* Output everything. */
1421 if (!quiet_flag)
1422 fprintf (stderr, "Assembling functions:\n");
1423 #ifdef ENABLE_CHECKING
1424 verify_cgraph ();
1425 #endif
1426
1427 cgraph_mark_functions_to_output ();
1428
1429 if (!flag_toplevel_reorder)
1430 cgraph_output_in_order ();
1431 else
1432 {
1433 cgraph_output_pending_asms ();
1434
1435 cgraph_expand_all_functions ();
1436 cgraph_varpool_remove_unreferenced_decls ();
1437
1438 cgraph_varpool_assemble_pending_decls ();
1439 }
1440
1441 if (cgraph_dump_file)
1442 {
1443 fprintf (cgraph_dump_file, "\nFinal ");
1444 dump_cgraph (cgraph_dump_file);
1445 }
1446 #ifdef ENABLE_CHECKING
1447 verify_cgraph ();
1448 /* Double check that all inline clones are gone and that all
1449 function bodies have been released from memory. */
1450 if (flag_unit_at_a_time
1451 && !dump_enabled_p (TDI_tree_all)
1452 && !(sorrycount || errorcount))
1453 {
1454 struct cgraph_node *node;
1455 bool error_found = false;
1456
1457 for (node = cgraph_nodes; node; node = node->next)
1458 if (node->analyzed
1459 && (node->global.inlined_to
1460 || DECL_SAVED_TREE (node->decl)))
1461 {
1462 error_found = true;
1463 dump_cgraph_node (stderr, node);
1464 }
1465 if (error_found)
1466 internal_error ("nodes with no released memory found");
1467 }
1468 #endif
1469 }
1470
/* Generate and emit a static constructor or destructor.  WHICH must be
   one of 'I' (constructor) or 'D' (destructor).  BODY should be a
   STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority handed to the target's ctor/dtor hooks when
   the target supports them.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  /* Makes each generated function name unique within the unit.  */
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  sprintf (which_buf, "%c_%d", which, counter++);
  name = get_file_function_name_long (which_buf);

  /* Build a FUNCTION_DECL of type void (void).  */
  decl = build_decl (FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  /* The function returns nothing; give it an artificial RESULT_DECL.  */
  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_IGNORED_P (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without native ctor/dtor support the function must stay visible
     so collect2 can find it.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* ??? We will get called LATE in the compilation process.  */
  if (cgraph_global_info_ready)
    {
      tree_lowering_passes (decl);
      tree_rest_of_compilation (decl);
    }
  else
    cgraph_finalize_function (decl, 0);

  if (targetm.have_ctors_dtors)
    {
      void (*fn) (rtx, int);

      if (which == 'I')
	fn = targetm.asm_out.constructor;
      else
	fn = targetm.asm_out.destructor;
      /* Register the emitted function's address with the target's
	 ctor/dtor mechanism at the requested priority.  */
      fn (XEXP (DECL_RTL (decl), 0), priority);
    }
}
1545
/* Initialize the callgraph dump stream.  */

void
init_cgraph (void)
{
  /* Returns NULL unless the cgraph dump was requested on the command
     line, so all `if (cgraph_dump_file)' guards elsewhere stay cheap.  */
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1551
1552 /* The edges representing the callers of the NEW_VERSION node were
1553 fixed by cgraph_function_versioning (), now the call_expr in their
1554 respective tree code should be updated to call the NEW_VERSION. */
1555
1556 static void
1557 update_call_expr (struct cgraph_node *new_version)
1558 {
1559 struct cgraph_edge *e;
1560
1561 gcc_assert (new_version);
1562 for (e = new_version->callers; e; e = e->next_caller)
1563 /* Update the call expr on the edges
1564 to call the new version. */
1565 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1566 }
1567
1568
1569 /* Create a new cgraph node which is the new version of
1570 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1571 edges which should be redirected to point to
1572 NEW_VERSION. ALL the callees edges of OLD_VERSION
1573 are cloned to the new version node. Return the new
1574 version node. */
1575
1576 static struct cgraph_node *
1577 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1578 tree new_decl,
1579 VEC(cgraph_edge_p,heap) *redirect_callers)
1580 {
1581 struct cgraph_node *new_version;
1582 struct cgraph_edge *e, *new_e;
1583 struct cgraph_edge *next_callee;
1584 unsigned i;
1585
1586 gcc_assert (old_version);
1587
1588 new_version = cgraph_node (new_decl);
1589
1590 new_version->analyzed = true;
1591 new_version->local = old_version->local;
1592 new_version->global = old_version->global;
1593 new_version->rtl = new_version->rtl;
1594 new_version->reachable = true;
1595 new_version->count = old_version->count;
1596
1597 /* Clone the old node callees. Recursive calls are
1598 also cloned. */
1599 for (e = old_version->callees;e; e=e->next_callee)
1600 {
1601 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1602 new_e->count = e->count;
1603 }
1604 /* Fix recursive calls.
1605 If OLD_VERSION has a recursive call after the
1606 previous edge cloning, the new version will have an edge
1607 pointing to the old version, which is wrong;
1608 Redirect it to point to the new version. */
1609 for (e = new_version->callees ; e; e = next_callee)
1610 {
1611 next_callee = e->next_callee;
1612 if (e->callee == old_version)
1613 cgraph_redirect_edge_callee (e, new_version);
1614
1615 if (!next_callee)
1616 break;
1617 }
1618 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1619 {
1620 /* Redirect calls to the old version node to point to its new
1621 version. */
1622 cgraph_redirect_edge_callee (e, new_version);
1623 }
1624
1625 return new_version;
1626 }
1627
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node, or NULL when DECL
   cannot be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    varray_type tree_map)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version. */
  new_decl = copy_node (old_decl);

  /* Create the new version's call-graph node.
     and update the edges of the new node. */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false);
  /* Update the call_expr on the edges to call the new version node. */
  update_call_expr (new_version_node);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_version_node->decl) = 0;
  DECL_ONE_ONLY (new_version_node->decl) = 0;
  TREE_PUBLIC (new_version_node->decl) = 0;
  DECL_COMDAT (new_version_node->decl) = 0;
  /* The clone is local and already in lowered (GIMPLE) form.  */
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  return new_version_node;
}
1681
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  Returns the clone node that now
   carries the preserved body.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  /* NODE must be the master node for its declaration.  */
  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* In non-unit-at-a-time we construct full fledged clone we never output to
     assembly file.  This clone is pointed out by inline_decl of original function
     and inlining infrastructure knows how to deal with this.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;

      first_clone = cgraph_clone_node (node, node->count, 0, false);
      first_clone->needed = 0;
      first_clone->reachable = 1;
      /* Recursively clone all bodies.  */
      for (e = first_clone->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  cgraph_clone_inlined_nodes (e, true, false);
    }
  else
    /* In unit-at-a-time mode the first existing clone takes over.  */
    first_clone = node->next_clone;

  /* Detach FIRST_CLONE from NODE's clone list, give it its own copied
     declaration, and register it in the decl->node hashtable.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  if (!flag_unit_at_a_time)
    node->inline_decl = first_clone->decl;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  /* The preserved body is private to this translation unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* All remaining clones now share the new declaration.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
1734