re PR middle-end/25962 (Pointer (null) check after the use in cgraph.c)
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based intraprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic intraprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline function.)
36
37 - cgraph_varpool_finalize_variable
38
39 This function has same behavior as the above but is used for static
40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once compilation unit is finalized and it will
45 no longer change.
46
47 In the unit-at-a-time the call-graph construction and local function
48 analysis takes place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be compilation
52 unit as defined by the language - for instance C frontend allows multiple
53 compilation units to be parsed at once and it should call function each
54 time parsing is done so we save memory.
55
56 - cgraph_optimize
57
58 In this unit-at-a-time compilation the intra procedural analysis takes
59 place here. In particular the static functions whose address is never
60 taken are marked as local. Backend can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at given stage
68 of compilation
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When function or variable is referenced by some hidden way (for instance
74 via assembly code and marked by attribute "used"), the call-graph data structure
75 must be updated accordingly by this function.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones or alternatively marking
81 callgraph and varpool nodes referenced by the as needed.
82
83 ??? On the tree-ssa genericizing should take place here and we will avoid
84 need for these hooks (replacing them by genericizing hook)
85
86 - expand_function callback
87
88 This function is used to expand function and pass it into RTL back-end.
89 Front-end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92 cgraph_varpool_finalize_function, cgraph_optimize can cause arbitrarily
93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode analyzing of all functions is deferred
98 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too
104 and variables found to be needed output to the assembly file. Via
105 mark_referenced call in assemble_variable functions referenced by
106 static variables are noticed too.
107
108 The intra-procedural information is produced and its existence
109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change function from !reachable to reachable and thus
111 assemble_variable no longer call mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that has not been completely inlined or are not external are output.
115
116 ??? It is possible that reference to function or variable is optimized
117 out. We can not deal with this nicely because topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize code so variables are output very last and only if they
122 really has been referenced by produced code, so we catch more cases
123 where reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions are variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used but
129 it is still needed for some legacy code that rely on particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using call of
135 cgraph_assemble_pending_function from cgraph_finalize_function. The
136 decision on whether function is needed is made more conservative so
137 uninlininable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and it
139 is completely relied upn assemble_variable to mark them. */
140
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_output_pending_asms (void);
175
176 /* Records tree nodes seen in record_reference. Simply using
177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
179 record_reference itself. */
180 static struct pointer_set_t *visited_nodes;
181
182 static FILE *cgraph_dump_file;
183
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  tree origin;

  /* A public "main" is the program's entry point; record that it is
     also externally visible.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible
      || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR25962)
   */
  if (((TREE_PUBLIC (decl) || !optimize) && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  /* In unit-at-a-time mode nothing below forces output; reachability
     analysis picks up whatever is actually used.  */
  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if its marked for inlining.  Otherwise we want to emit it now.  */

  /* "extern inline" functions are never output locally.  */
  if (DECL_EXTERNAL (decl))
    return false;
  /* Nested functions of extern inline function shall not be emit unless
     we inlined the origin.  */
  for (origin = decl_function_context (decl); origin;
       origin = decl_function_context (origin))
    if (DECL_EXTERNAL (origin))
      return false;
  /* We want to emit COMDAT functions only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  /* Emit now unless the function is (declared) inline; inline bodies
     are deferred so unused ones can be eliminated.  */
  if (!DECL_INLINE (decl)
      || (!node->local.disregard_inline_limits
	  /* When declared inline, defer even the uninlinable functions.
	     This allows them to be eliminated when unused.  */
	  && !DECL_DECLARED_INLINE_P (decl)
	  && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
    return true;

  return false;
}
261
262 /* Walk the decls we marked as necessary and see if they reference new
263 variables or functions and add them into the worklists. */
264 static bool
265 cgraph_varpool_analyze_pending_decls (void)
266 {
267 bool changed = false;
268 timevar_push (TV_CGRAPH);
269
270 while (cgraph_varpool_first_unanalyzed_node)
271 {
272 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
273
274 cgraph_varpool_first_unanalyzed_node->analyzed = true;
275
276 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
277
278 if (DECL_INITIAL (decl))
279 {
280 visited_nodes = pointer_set_create ();
281 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
282 pointer_set_destroy (visited_nodes);
283 visited_nodes = NULL;
284 }
285 changed = true;
286 }
287 timevar_pop (TV_CGRAPH);
288 return changed;
289 }
290
291 /* Optimization of function bodies might've rendered some variables as
292 unnecessary so we want to avoid these from being compiled.
293
294 This is done by pruning the queue and keeping only the variables that
295 really appear needed (ie they are either externally visible or referenced
296 by compiled function). Re-doing the reachability analysis on variables
297 brings back the remaining variables referenced by these. */
298 static void
299 cgraph_varpool_remove_unreferenced_decls (void)
300 {
301 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
302
303 cgraph_varpool_reset_queue ();
304
305 if (errorcount || sorrycount)
306 return;
307
308 while (node)
309 {
310 tree decl = node->decl;
311 next = node->next_needed;
312 node->needed = 0;
313
314 if (node->finalized
315 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
316 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
317 || node->force_output
318 || decide_is_variable_needed (node, decl)
319 /* ??? Cgraph does not yet rule the world with an iron hand,
320 and does not control the emission of debug information.
321 After a variable has its DECL_RTL set, we must assume that
322 it may be referenced by the debug information, and we can
323 no longer elide it. */
324 || DECL_RTL_SET_P (decl)))
325 cgraph_varpool_mark_needed_node (node);
326
327 node = next;
328 }
329 /* Make sure we mark alias targets as used targets. */
330 finish_aliases_1 ();
331 cgraph_varpool_analyze_pending_decls ();
332 }
333
334
335 /* When not doing unit-at-a-time, output all functions enqueued.
336 Return true when such a functions were found. */
337
338 bool
339 cgraph_assemble_pending_functions (void)
340 {
341 bool output = false;
342
343 if (flag_unit_at_a_time)
344 return false;
345
346 cgraph_output_pending_asms ();
347
348 while (cgraph_nodes_queue)
349 {
350 struct cgraph_node *n = cgraph_nodes_queue;
351
352 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
353 n->next_needed = NULL;
354 if (!n->global.inlined_to
355 && !n->alias
356 && !DECL_EXTERNAL (n->decl))
357 {
358 cgraph_expand_function (n);
359 output = true;
360 }
361 }
362
363 /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
364 the expansion process. Note that this queue may grow as its
365 being processed, as the new functions may generate new ones. */
366 while (cgraph_expand_queue)
367 {
368 struct cgraph_node *n = cgraph_expand_queue;
369 cgraph_expand_queue = cgraph_expand_queue->next_needed;
370 n->next_needed = NULL;
371 cgraph_finalize_function (n->decl, false);
372 output = true;
373 }
374
375 return output;
376 }
377
378
/* As an GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->output is set, then this is a unit-at-a-time compilation
     and we have already begun whole-unit analysis.  This is *not*
     testing for whether we've already emitted the function.  That
     case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should
     never present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->output);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  /* Record the redefinition so inlining of the stale extern inline
     body is refused (see initialize_inline_failed).  */
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  if (!flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      /* Remove clones that were inlined into NODE: their bodies came
	 from the old definition.  NOTE(review): cgraph_remove_node
	 mutates the node list while N->next is read afterwards;
	 presumably removal does not immediately recycle N -- confirm
	 against cgraph.c.  */
      for (n = cgraph_nodes; n; n = n->next)
	if (n->global.inlined_to == node)
	  cgraph_remove_node (n);
    }

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already proceeded it and ignored as not needed.  */
  if (node->reachable && !flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      /* Only clear `reachable' if NODE is no longer on the assembly
	 queue; otherwise it would never be re-queued.  */
      for (n = cgraph_nodes_queue; n; n = n->next_needed)
	if (n == node)
	  break;
      if (!n)
	node->reachable = 0;
    }
}
432
433 static void
434 cgraph_lower_function (struct cgraph_node *node)
435 {
436 if (node->lowered)
437 return;
438 tree_lowering_passes (node->decl);
439 node->lowered = true;
440 }
441
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A second finalization means the function was redefined; discard
     the analysis of the previous body.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  notice_global_symbol (decl);
  node->decl = decl;
  node->local.finalized = true;
  /* If the front end already built a CFG, the body is lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_analyze_function (node);
      cgraph_decide_inlining_incrementally (node, false);
    }

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If not unit at a time, go ahead and emit everything we've found
     to be reachable at this time.  */
  if (!nested)
    {
      /* Only collect garbage when nothing was assembled (the caller
	 permitted GC by passing !NESTED).  */
      if (!cgraph_assemble_pending_functions ())
	ggc_collect ();
    }

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);
}
496
497 /* Walk tree and record all calls. Called via walk_tree. */
498 static tree
499 record_reference (tree *tp, int *walk_subtrees, void *data)
500 {
501 tree t = *tp;
502
503 switch (TREE_CODE (t))
504 {
505 case VAR_DECL:
506 /* ??? Really, we should mark this decl as *potentially* referenced
507 by this function and re-examine whether the decl is actually used
508 after rtl has been generated. */
509 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
510 {
511 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
512 if (lang_hooks.callgraph.analyze_expr)
513 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
514 data);
515 }
516 break;
517
518 case FDESC_EXPR:
519 case ADDR_EXPR:
520 if (flag_unit_at_a_time)
521 {
522 /* Record dereferences to the functions. This makes the
523 functions reachable unconditionally. */
524 tree decl = TREE_OPERAND (*tp, 0);
525 if (TREE_CODE (decl) == FUNCTION_DECL)
526 cgraph_mark_needed_node (cgraph_node (decl));
527 }
528 break;
529
530 default:
531 /* Save some cycles by not walking types and declaration as we
532 won't find anything useful there anyway. */
533 if (IS_TYPE_OR_DECL_P (*tp))
534 {
535 *walk_subtrees = 0;
536 break;
537 }
538
539 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
540 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
541 break;
542 }
543
544 return NULL;
545 }
546
547 /* Create cgraph edges for function calls inside BODY from NODE. */
548
549 static void
550 cgraph_create_edges (struct cgraph_node *node, tree body)
551 {
552 basic_block bb;
553
554 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
555 block_stmt_iterator bsi;
556 tree step;
557 visited_nodes = pointer_set_create ();
558
559 /* Reach the trees by walking over the CFG, and note the
560 enclosing basic-blocks in the call edges. */
561 FOR_EACH_BB_FN (bb, this_cfun)
562 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
563 {
564 tree stmt = bsi_stmt (bsi);
565 tree call = get_call_expr_in (stmt);
566 tree decl;
567
568 if (call && (decl = get_callee_fndecl (call)))
569 {
570 cgraph_create_edge (node, cgraph_node (decl), stmt,
571 bb->count,
572 bb->loop_depth);
573 walk_tree (&TREE_OPERAND (call, 1),
574 record_reference, node, visited_nodes);
575 if (TREE_CODE (stmt) == MODIFY_EXPR)
576 walk_tree (&TREE_OPERAND (stmt, 0),
577 record_reference, node, visited_nodes);
578 }
579 else
580 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
581 }
582
583 /* Look for initializers of constant variables and private statics. */
584 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
585 step;
586 step = TREE_CHAIN (step))
587 {
588 tree decl = TREE_VALUE (step);
589 if (TREE_CODE (decl) == VAR_DECL
590 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
591 && flag_unit_at_a_time)
592 cgraph_varpool_finalize_decl (decl);
593 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
594 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
595 }
596
597 pointer_set_destroy (visited_nodes);
598 visited_nodes = NULL;
599 }
600
601 /* Give initial reasons why inlining would fail. Those gets
602 either NULLified or usually overwritten by more precise reason
603 later. */
604 static void
605 initialize_inline_failed (struct cgraph_node *node)
606 {
607 struct cgraph_edge *e;
608
609 for (e = node->callers; e; e = e->next_caller)
610 {
611 gcc_assert (!e->callee->global.inlined_to);
612 gcc_assert (e->inline_failed);
613 if (node->local.redefined_extern_inline)
614 e->inline_failed = N_("redefined extern inline functions are not "
615 "considered for inlining");
616 else if (!node->local.inlinable)
617 e->inline_failed = N_("function not inlinable");
618 else
619 e->inline_failed = N_("function not considered for inlining");
620 }
621 }
622
623 /* Rebuild call edges from current function after a passes not aware
624 of cgraph updating. */
625 static unsigned int
626 rebuild_cgraph_edges (void)
627 {
628 basic_block bb;
629 struct cgraph_node *node = cgraph_node (current_function_decl);
630 block_stmt_iterator bsi;
631
632 cgraph_node_remove_callees (node);
633
634 node->count = ENTRY_BLOCK_PTR->count;
635
636 FOR_EACH_BB (bb)
637 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
638 {
639 tree stmt = bsi_stmt (bsi);
640 tree call = get_call_expr_in (stmt);
641 tree decl;
642
643 if (call && (decl = get_callee_fndecl (call)))
644 cgraph_create_edge (node, cgraph_node (decl), stmt,
645 bb->count,
646 bb->loop_depth);
647 }
648 initialize_inline_failed (node);
649 gcc_assert (!node->global.inlined_to);
650 return 0;
651 }
652
/* Pass descriptor for rebuild_cgraph_edges; requires only a CFG and is
   run after passes that do not maintain the call graph.  */
struct tree_opt_pass pass_rebuild_cgraph_edges =
{
  NULL,					/* name */
  NULL,					/* gate */
  rebuild_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
669
/* Verify cgraph nodes of given cgraph node.  Checks edge/count sanity,
   the consistency of the inlining plan, membership in the clone list,
   and that recorded call edges match the call statements present in
   the CFG.  Aborts with an internal error on any inconsistency.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct cgraph_node *main_clone;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  block_stmt_iterator bsi;
  bool error_found = false;

  timevar_push (TV_CGRAPH_VERIFY);
  /* The AUX fields of callee edges are used below as "seen" markers,
     so they must start out clear.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       cgraph_node_name (e->caller), cgraph_node_name (e->callee));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  /* Check each caller edge against the recorded inlining plan.  */
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
	{
	  error ("caller edge count is negative");
	  error_found = true;
	}
      if (!e->inline_failed)
	{
	  /* E is an inlined call: NODE must record the function it was
	     ultimately inlined into.  */
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  /* An inlined node can have only one caller edge.  */
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecesors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  /* NODE must be reachable from the clone list of its decl.  */
  for (main_clone = cgraph_node (node->decl); main_clone;
       main_clone = main_clone->next_clone)
    if (main_clone == node)
      break;
  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* For an analyzed, not-yet-emitted function with a body, cross-check
     the recorded call edges against the call statements in the CFG.  */
  if (node->analyzed
      && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
	      {
		tree stmt = bsi_stmt (bsi);
		tree call = get_call_expr_in (stmt);
		tree decl;
		if (call && (decl = get_callee_fndecl (call)))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    if (e)
		      {
			/* E->aux doubles as a "matched" flag; hitting
			   the same edge twice means a shared statement.  */
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_generic_stmt (stmt);
			    error_found = true;
			  }
			if (e->callee->decl != cgraph_node (decl)->decl
			    && e->inline_failed)
			  {
			    error ("edge points to wrong declaration:");
			    debug_tree (e->callee->decl);
			    fprintf (stderr," Instead of:");
			    debug_tree (decl);
			  }
			e->aux = (void *)1;
		      }
		    else
		      {
			error ("missing callgraph edge for call stmt:");
			debug_generic_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	  visited_nodes = NULL;
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* A callee edge left unmatched has no corresponding statement;
	 clear the markers as we go.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     cgraph_node_name (e->caller),
		     cgraph_node_name (e->callee));
	      debug_generic_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}
817
818 /* Verify whole cgraph structure. */
819 void
820 verify_cgraph (void)
821 {
822 struct cgraph_node *node;
823
824 if (sorrycount || errorcount)
825 return;
826
827 for (node = cgraph_nodes; node; node = node->next)
828 verify_cgraph_node (node);
829 }
830
831 /* Output one variable, if necessary. Return whether we output it. */
832 static bool
833 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
834 {
835 tree decl = node->decl;
836
837 if (!TREE_ASM_WRITTEN (decl)
838 && !node->alias
839 && !DECL_EXTERNAL (decl)
840 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
841 {
842 assemble_variable (decl, 0, 1, 0);
843 /* Local static variables are never seen by check_global_declarations
844 so we need to output debug info by hand. */
845 if (DECL_CONTEXT (decl)
846 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
847 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
848 && errorcount == 0 && sorrycount == 0)
849 {
850 timevar_push (TV_SYMOUT);
851 (*debug_hooks->global_decl) (decl);
852 timevar_pop (TV_SYMOUT);
853 }
854 return true;
855 }
856
857 return false;
858 }
859
860 /* Output all variables enqueued to be assembled. */
861 bool
862 cgraph_varpool_assemble_pending_decls (void)
863 {
864 bool changed = false;
865
866 if (errorcount || sorrycount)
867 return false;
868
869 /* EH might mark decls as needed during expansion. This should be safe since
870 we don't create references to new function, but it should not be used
871 elsewhere. */
872 cgraph_varpool_analyze_pending_decls ();
873
874 while (cgraph_varpool_nodes_queue)
875 {
876 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
877
878 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
879 if (cgraph_varpool_assemble_decl (node))
880 changed = true;
881 node->next_needed = NULL;
882 }
883 return changed;
884 }
885
886 /* Output all asm statements we have stored up to be output. */
887
888 static void
889 cgraph_output_pending_asms (void)
890 {
891 struct cgraph_asm_node *can;
892
893 if (errorcount || sorrycount)
894 return;
895
896 for (can = cgraph_asm_nodes; can; can = can->next)
897 assemble_asm (can->asm_str);
898 cgraph_asm_nodes = NULL;
899 }
900
/* Analyze the function scheduled to be output.  Lowers the body,
   builds its call-graph edges, and computes the local inlining data
   (inlinability, size estimate, initial inline-failure reasons).  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* Make NODE's body the current function context for the passes below.  */
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  cgraph_lower_function (node);

  /* First kill forward declaration so reverse inlining works properly.  */
  cgraph_create_edges (node, decl);

  node->local.inlinable = tree_inlinable_function_p (decl);
  node->local.self_insns = estimate_num_insns (decl);
  if (node->local.inlinable)
    node->local.disregard_inline_limits
      = lang_hooks.tree_inlining.disregard_inline_limits (decl);
  initialize_inline_failed (node);
  /* -fno-inline: refuse inlining unless the function must be inlined.  */
  if (flag_really_no_inline && !node->local.disregard_inline_limits)
    node->local.inlinable = 0;
  /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
  node->global.insns = node->local.self_insns;

  node->analyzed = true;
  pop_cfun ();
  current_function_decl = NULL;
}
929
/* Analyze the whole compilation unit once it is parsed completely.
   In unit-at-a-time mode this performs the reachability propagation
   and analysis described in the file-header comment, then reclaims
   the bodies of unreachable functions.  */

void
cgraph_finalize_compilation_unit (void)
{
  struct cgraph_node *node;
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;

  finish_aliases_1 ();

  /* In non-unit-at-a-time mode everything was emitted as parsing went
     along; just flush what is still pending.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_output_pending_asms ();
      cgraph_assemble_pending_functions ();
      return;
    }

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  cgraph_varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_SAVED_TREE (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (DECL_SAVED_TREE (decl));

      cgraph_analyze_function (node);

      /* Callees discovered by the analysis become reachable and are
	 thereby queued for analysis themselves.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      cgraph_varpool_analyze_pending_decls ();
    }

  /* Collect entry points to the unit.  */

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release bodies of functions that turned out to be unreachable, to
     conserve memory.  */
  for (node = cgraph_nodes; node != first_analyzed; node = node->next)
    {
      tree decl = node->decl;

      if (node->local.finalized && !DECL_SAVED_TREE (decl))
	cgraph_reset_node (node);

      if (!node->reachable && DECL_SAVED_TREE (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  /* NOTE(review): node->next is read after removal; presumably
	     the node is not recycled until GC runs -- confirm.  */
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
  timevar_pop (TV_CGRAPH);
}
1043 /* Figure out what functions we want to assemble. */
1044
1045 static void
1046 cgraph_mark_functions_to_output (void)
1047 {
1048 struct cgraph_node *node;
1049
1050 for (node = cgraph_nodes; node; node = node->next)
1051 {
1052 tree decl = node->decl;
1053 struct cgraph_edge *e;
1054
1055 gcc_assert (!node->output);
1056
1057 for (e = node->callers; e; e = e->next_caller)
1058 if (e->inline_failed)
1059 break;
1060
1061 /* We need to output all local functions that are used and not
1062 always inlined, as well as those that are reachable from
1063 outside the current compilation unit. */
1064 if (DECL_SAVED_TREE (decl)
1065 && !node->global.inlined_to
1066 && (node->needed
1067 || (e && node->reachable))
1068 && !TREE_ASM_WRITTEN (decl)
1069 && !DECL_EXTERNAL (decl))
1070 node->output = 1;
1071 else
1072 {
1073 /* We should've reclaimed all functions that are not needed. */
1074 #ifdef ENABLE_CHECKING
1075 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1076 && !DECL_EXTERNAL (decl))
1077 {
1078 dump_cgraph_node (stderr, node);
1079 internal_error ("failed to reclaim unneeded function");
1080 }
1081 #endif
1082 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1083 || DECL_EXTERNAL (decl));
1084
1085 }
1086
1087 }
1088 }
1089
/* Expand function specified by NODE: lower it, hand it to the language
   backend hook to generate RTL and emit assembly, and release the tree
   body afterwards unless clones still need it.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  if (flag_unit_at_a_time)
    announce_function (decl);

  /* Lowering must happen before handing the body to the backend.  */
  cgraph_lower_function (node);

  /* Generate RTL for the body of DECL.  */
  lang_hooks.callgraph.expand_function (decl);

  /* Make sure that BE didn't give up on compiling.  */
  /* ??? Can happen with nested function of extern inline.  */
  gcc_assert (TREE_ASM_WRITTEN (node->decl));

  current_function_decl = NULL;
  if (!cgraph_preserve_function_body_p (node->decl))
    {
      /* No clone needs the body anymore; release it to save memory.  */
      DECL_SAVED_TREE (node->decl) = NULL;
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
      DECL_INITIAL (node->decl) = error_mark_node;
      /* Eliminate all call edges.  This is important so the call_expr no longer
	 points to the dead function body.  */
      cgraph_node_remove_callees (node);
    }

  cgraph_function_flags_ready = true;
}
1125
1126 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1127
1128 bool
1129 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1130 {
1131 *reason = e->inline_failed;
1132 return !e->inline_failed;
1133 }
1134
1135
1136
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos = 0, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->output)
      order[new_order_pos++] = order[i];

  /* Walk the compacted array backwards (reverse postorder) so callees
     are expanded before their callers.  */
  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->output)
	{
	  gcc_assert (node->reachable);
	  node->output = 0;
	  cgraph_expand_function (node);
	}
    }

  free (order);

  /* Process CGRAPH_EXPAND_QUEUE, these are functions created during
     the expansion process.  Note that this queue may grow as its
     being processed, as the new functions may generate new ones.  */
  while (cgraph_expand_queue)
    {
      node = cgraph_expand_queue;
      cgraph_expand_queue = cgraph_expand_queue->next_needed;
      node->next_needed = NULL;
      node->output = 0;
      /* A queued function may already be in CFG form; record that so it
	 is not lowered a second time.  */
      node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
      cgraph_expand_function (node);
    }
}
1190
/* This is used to sort the node types by the cgraph order number.  */

struct cgraph_order_sort
{
  /* Kind of toplevel entity occupying this order slot;
     ORDER_UNDEFINED marks an order number with nothing to emit.  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  /* The entity itself, discriminated by KIND.  */
  union
  {
    struct cgraph_node *f;
    struct cgraph_varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
1203
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct cgraph_varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Build a table indexed by order number.  NOTE(review): the table is
     alloca'd and proportional to cgraph_order, i.e. to the number of
     toplevel entities seen so far -- presumably assumed to be modest;
     verify for pathological inputs.  */
  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  cgraph_varpool_analyze_pending_decls ();

  /* Slot every function marked for output by its order number; each
     order number belongs to at most one entity.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->output)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Slot every pending variable.  */
  for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* Slot every toplevel asm statement.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->output = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  cgraph_varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* All asm nodes have been emitted.  */
  cgraph_asm_nodes = NULL;
}
1282
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.  */

static void
cgraph_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct cgraph_varpool_node *vnode;

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* A reachable COMDAT function, or (outside whole-program mode)
	 any reachable public non-external function, may be referenced
	 from outside this unit.  */
      if (node->reachable
	  && (DECL_COMDAT (node->decl)
	      || (!flag_whole_program
		  && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
	node->local.externally_visible = true;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  /* Demote the decl to file-local linkage for the backend.  */
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
	  TREE_PUBLIC (node->decl) = 0;
	}
      /* Local functions are fully analyzed here and callable only from
	 within this unit.  */
      node->local.local = (!node->needed
			   && node->analyzed
			   && !DECL_EXTERNAL (node->decl)
			   && !node->local.externally_visible);
    }
  /* Apply the analogous marking to variables.  */
  for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (vnode->needed
	  && !flag_whole_program
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
	vnode->externally_visible = 1;
      if (!vnode->externally_visible)
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
	  TREE_PUBLIC (vnode->decl) = 0;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }

  /* Because we have to be conservative on the boundaries of source
     level units, it is possible that we marked some functions in
     reachable just because they might be used later via external
     linkage, but after making them local they are really unreachable
     now.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
      fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
}
1354
1355 /* Return true when function body of DECL still needs to be kept around
1356 for later re-use. */
1357 bool
1358 cgraph_preserve_function_body_p (tree decl)
1359 {
1360 struct cgraph_node *node;
1361 if (!cgraph_global_info_ready)
1362 return (DECL_INLINE (decl) && !flag_really_no_inline);
1363 /* Look if there is any clone around. */
1364 for (node = cgraph_node (decl); node; node = node->next_clone)
1365 if (node->global.inlined_to)
1366 return true;
1367 return false;
1368 }
1369
/* Run the list of interprocedural passes (ALL_IPA_PASSES) over the
   whole callgraph.  */
static void
ipa_passes (void)
{
  /* IPA passes operate on the callgraph, not on any single function.  */
  cfun = NULL;
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1379
/* Perform simple optimizations based on callgraph.

   Main driver of the IPA and expansion phases: decides visibility, runs
   the IPA passes, removes functions made unreachable by them, marks
   what must be assembled, and expands everything (either in source
   order for -fno-toplevel-reorder, or in topological order).  */

void
cgraph_optimize (void)
{
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (!flag_unit_at_a_time)
    {
      /* Nothing was deferred; flush pending asms and variables only.  */
      cgraph_output_pending_asms ();
      cgraph_varpool_assemble_pending_decls ();
      return;
    }

  process_pending_assemble_externals ();

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  cgraph_varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (!quiet_flag)
    fprintf (stderr, "Performing intraprocedural optimizations\n");

  cgraph_function_and_variable_visibility ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Marked ");
      dump_cgraph (cgraph_dump_file);
    }
  ipa_passes ();
  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.
     NOTE(review): this passes DUMP_FILE while the rest of this function
     uses CGRAPH_DUMP_FILE -- verify the distinction is intentional.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_mark_functions_to_output ();

  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      cgraph_varpool_remove_unreferenced_decls ();

      cgraph_varpool_assemble_pending_decls ();
    }

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (flag_unit_at_a_time
      && !dump_enabled_p (TDI_tree_all)
      && !(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| DECL_SAVED_TREE (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with no released memory found");
    }
#endif
}
1474
/* Generate and emit a static constructor or destructor.  WHICH must be
   one of 'I' or 'D'.  BODY should be a STATEMENT_LIST containing
   GENERIC statements.  PRIORITY is the initialization priority passed
   to the target's constructor/destructor emission hook.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* Give each generated cdtor a unique, file-specific name.  */
  sprintf (which_buf, "%c_%d", which, counter++);
  name = get_file_function_name_long (which_buf);

  /* Build a void fn (void) FUNCTION_DECL to hold BODY.  */
  decl = build_decl (FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_IGNORED_P (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl);

  /* The function is compiler-generated and must not appear in debug
     output, be instrumented, or be inlined away.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target cdtor support the collector must see this symbol.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  /* ??? We will get called LATE in the compilation process.  */
  if (cgraph_global_info_ready)
    {
      /* Too late for the normal pipeline; compile directly.  */
      tree_lowering_passes (decl);
      tree_rest_of_compilation (decl);
    }
  else
    cgraph_finalize_function (decl, 0);

  if (targetm.have_ctors_dtors)
    {
      void (*fn) (rtx, int);

      /* Register the function with the target's ctor/dtor mechanism.  */
      if (which == 'I')
	fn = targetm.asm_out.constructor;
      else
	fn = targetm.asm_out.destructor;
      fn (XEXP (DECL_RTL (decl), 0), priority);
    }
}
1549
/* Initialize the callgraph module: open the cgraph dump file if that
   dump was requested on the command line.  */
void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1555
1556 /* The edges representing the callers of the NEW_VERSION node were
1557 fixed by cgraph_function_versioning (), now the call_expr in their
1558 respective tree code should be updated to call the NEW_VERSION. */
1559
1560 static void
1561 update_call_expr (struct cgraph_node *new_version)
1562 {
1563 struct cgraph_edge *e;
1564
1565 gcc_assert (new_version);
1566 for (e = new_version->callers; e; e = e->next_caller)
1567 /* Update the call expr on the edges
1568 to call the new version. */
1569 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1570 }
1571
1572
1573 /* Create a new cgraph node which is the new version of
1574 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1575 edges which should be redirected to point to
1576 NEW_VERSION. ALL the callees edges of OLD_VERSION
1577 are cloned to the new version node. Return the new
1578 version node. */
1579
1580 static struct cgraph_node *
1581 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1582 tree new_decl,
1583 VEC(cgraph_edge_p,heap) *redirect_callers)
1584 {
1585 struct cgraph_node *new_version;
1586 struct cgraph_edge *e, *new_e;
1587 struct cgraph_edge *next_callee;
1588 unsigned i;
1589
1590 gcc_assert (old_version);
1591
1592 new_version = cgraph_node (new_decl);
1593
1594 new_version->analyzed = true;
1595 new_version->local = old_version->local;
1596 new_version->global = old_version->global;
1597 new_version->rtl = new_version->rtl;
1598 new_version->reachable = true;
1599 new_version->count = old_version->count;
1600
1601 /* Clone the old node callees. Recursive calls are
1602 also cloned. */
1603 for (e = old_version->callees;e; e=e->next_callee)
1604 {
1605 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1606 new_e->count = e->count;
1607 }
1608 /* Fix recursive calls.
1609 If OLD_VERSION has a recursive call after the
1610 previous edge cloning, the new version will have an edge
1611 pointing to the old version, which is wrong;
1612 Redirect it to point to the new version. */
1613 for (e = new_version->callees ; e; e = next_callee)
1614 {
1615 next_callee = e->next_callee;
1616 if (e->callee == old_version)
1617 cgraph_redirect_edge_callee (e, new_version);
1618
1619 if (!next_callee)
1620 break;
1621 }
1622 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1623 {
1624 /* Redirect calls to the old version node to point to its new
1625 version. */
1626 cgraph_redirect_edge_callee (e, new_version);
1627 }
1628
1629 return new_version;
1630 }
1631
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node, or NULL when the
   function cannot be versioned at all.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    varray_type tree_map)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  /* Bail out when the function body cannot legally be duplicated.  */
  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  new_decl = copy_node (old_decl);

  /* Create the new version's call-graph node.
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false);
  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_version_node->decl) = 0;
  DECL_ONE_ONLY (new_version_node->decl) = 0;
  TREE_PUBLIC (new_version_node->decl) = 0;
  DECL_COMDAT (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  return new_version_node;
}
1685
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  Returns the clone node that now owns
   the independent copy of NODE's body.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  /* NODE must be the master node of its decl, not a clone.  */
  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* In non-unit-at-a-time we construct full fledged clone we never output to
     assembly file. This clone is pointed out by inline_decl of original function
     and inlining infrastructure knows how to deal with this.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;

      first_clone = cgraph_clone_node (node, node->count, 0, false);
      first_clone->needed = 0;
      first_clone->reachable = 1;
      /* Recursively clone all bodies.  */
      for (e = first_clone->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  cgraph_clone_inlined_nodes (e, true, false);
    }
  else
    first_clone = node->next_clone;

  /* Detach FIRST_CLONE from NODE's clone chain and give it its own decl,
     making it the new master for the remaining clones.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  if (!flag_unit_at_a_time)
    node->inline_decl = first_clone->decl;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  /* The saved copy is strictly internal to this translation unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* Remaining clones now share the new master's decl.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
1738