gcc/cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23    as a few basic interprocedural optimizers.
24
25    The main purpose of this file is to act as an interface between the
26    tree based front ends and the back end (and middle end).
27
28    The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32       This function is called once the front end has parsed the whole body of a
33       function and it is certain that neither the body nor the declaration will change.
34
35       (There is one exception needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_variable
38
39       This function has the same behavior as the above but is used for static
40       variables.
41
42 - cgraph_finalize_compilation_unit
43
44       This function is called once the compilation unit is finalized and will
45       no longer change.
46
47       In unit-at-a-time mode the call-graph construction and local function
48       analysis take place here.  Bodies of unreachable functions are released
49       to conserve memory usage.
50
51       ??? From this point of view the compilation unit should be the compilation
52       unit as defined by the language - for instance the C front end allows
53       multiple compilation units to be parsed at once and should call this
54       function each time parsing is done, so that we save memory.
55
56 - cgraph_optimize
57
58       In unit-at-a-time compilation the interprocedural analysis takes
59       place here.  In particular, static functions whose address is never
60       taken are marked as local.  The back end can then use this information to
61       modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_variables
65
66       In non-unit-at-a-time mode these functions can be used to force compilation
67       of functions or variables that are known to be needed at a given stage
68       of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73       When a function or variable is referenced in some hidden way (for instance
74       via assembly code and marked by the attribute "used"), the call-graph data
75       structure must be updated accordingly by these functions.
76
77 - analyze_expr callback
78
79       This function is responsible for lowering tree nodes not understood by
80       generic code into understandable ones or alternatively for marking
81       callgraph and varpool nodes referenced by the expression as needed.
82
83       ??? On tree-ssa, genericizing should take place here and we would avoid
84       the need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
88       This function is used to expand a function and pass it to the RTL back end.
89       The front end should not make any assumptions about when this function can
90       be called.  In particular cgraph_assemble_pending_functions,
91       cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
92       cgraph_varpool_finalize_function and cgraph_optimize can cause arbitrary
93       previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97     - unit-at-a-time:  In this mode the analysis of all functions is deferred
98       to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
99
100       In cgraph_finalize_compilation_unit the reachable functions are
101       analyzed.  During analysis the call-graph edges from reachable
102       functions are constructed and their destinations are marked as
103       reachable.  References to functions and variables are discovered too,
104       and variables found to be needed are output to the assembly file.  Via
105       the mark_referenced call in assemble_variable, functions referenced by
106       static variables are noticed too.
107
108       The interprocedural information is produced and its existence is
109       indicated by global_info_ready.  Once this flag is set it is impossible
110       to change a function from !reachable to reachable and thus
111       assemble_variable no longer calls mark_referenced.
112
113       Finally the call-graph is topologically sorted and all reachable functions
114       that have not been completely inlined and are not external are output.
115
116       ??? It is possible that a reference to a function or variable is optimized
117       out.  We cannot deal with this nicely because the topological order is not
118       suitable for it.  For tree-ssa we may consider another pass doing
119       optimization and re-discovering reachable functions.
120
121       ??? Reorganize the code so variables are output last and only if they
122       really have been referenced by the produced code, so that we catch more
123       cases where the reference has been optimized out.
124
125 - non-unit-at-a-time
126
127       All functions and variables are output as early as possible to conserve
128       memory consumption.  This may or may not result in less memory being used,
129       but it is still needed for some legacy code that relies on a particular
130       ordering of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134       Functions are output early using a call to
135       cgraph_assemble_pending_functions from cgraph_finalize_function.  The
136       decision on whether a function is needed is made more conservatively, so
137       uninlinable static functions are needed too.  During the call-graph
138       construction the edge destinations are not marked as reachable and we
139       rely completely on assemble_variable to mark them.  */
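
/* A minimal usage sketch (illustrative only, not part of this file's code):
   a front end in unit-at-a-time mode is expected to drive the entry points
   described above roughly as follows.  parse_next_toplevel_decl is a
   hypothetical stand-in for the front end's own parser loop.  */
#if 0
static void
hypothetical_frontend_compile_unit (void)
{
  tree decl;

  while ((decl = parse_next_toplevel_decl ()) != NULL_TREE)
    {
      if (TREE_CODE (decl) == FUNCTION_DECL)
        /* The body and declaration are final at this point.  */
        cgraph_finalize_function (decl, /*nested=*/false);
      else if (TREE_CODE (decl) == VAR_DECL)
        cgraph_varpool_finalize_decl (decl);
    }

  /* The whole unit has been parsed; build and analyze the call graph.  */
  cgraph_finalize_compilation_unit ();
  /* Interprocedural analysis and expansion of the reachable functions.  */
  cgraph_optimize ();
}
#endif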
140
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174
175 /* Records tree nodes seen in record_reference. Simply using
176 walk_tree_without_duplicates doesn't guarantee each node is visited
177 once because it gets a new htab upon each recursive call from
178 record_reference itself. */
179 static struct pointer_set_t *visited_nodes;
180
181 static FILE *cgraph_dump_file;
182
183 /* Determine if function DECL is needed. That is, visible to something
184 either outside this translation unit, something magic in the system
185    configury, or (if not doing unit-at-a-time) to something we haven't
186 seen yet. */
187
188 static bool
189 decide_is_function_needed (struct cgraph_node *node, tree decl)
190 {
191 tree origin;
192 if (MAIN_NAME_P (DECL_NAME (decl))
193 && TREE_PUBLIC (decl))
194 {
195 node->local.externally_visible = true;
196 return true;
197 }
198
199 /* If the user told us it is used, then it must be so. */
200 if (node->local.externally_visible
201 || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
202 return true;
203
204 /* ??? If the assembler name is set by hand, it is possible to assemble
205 the name later after finalizing the function and the fact is noticed
206 in assemble_name then. This is arguably a bug. */
207 if (DECL_ASSEMBLER_NAME_SET_P (decl)
208 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
209 return true;
210
211 /* If we decided it was needed before, but at the time we didn't have
212 the body of the function available, then it's still needed. We have
213 to go back and re-check its dependencies now. */
214 if (node->needed)
215 return true;
216
217 /* Externally visible functions must be output. The exception is
218 COMDAT functions that must be output only when they are needed. */
219 if ((TREE_PUBLIC (decl) && !flag_whole_program)
220 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
221 return true;
222
223 /* Constructors and destructors are reachable from the runtime by
224 some mechanism. */
225 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
226 return true;
227
228 if (flag_unit_at_a_time)
229 return false;
230
231 /* If not doing unit at a time, then we'll only defer this function
232      if it's marked for inlining.  Otherwise we want to emit it now.  */
233
234 /* "extern inline" functions are never output locally. */
235 if (DECL_EXTERNAL (decl))
236 return false;
237   /* Nested functions of an extern inline function shall not be emitted unless
238      we inlined the origin.  */
239 for (origin = decl_function_context (decl); origin;
240 origin = decl_function_context (origin))
241 if (DECL_EXTERNAL (origin))
242 return false;
243 /* We want to emit COMDAT functions only when absolutely necessary. */
244 if (DECL_COMDAT (decl))
245 return false;
246 if (!DECL_INLINE (decl)
247 || (!node->local.disregard_inline_limits
248 /* When declared inline, defer even the uninlinable functions.
249 This allows them to be eliminated when unused. */
250 && !DECL_DECLARED_INLINE_P (decl)
251 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
252 return true;
253
254 return false;
255 }
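
/* Source-level illustration (a sketch, not compiled here) of static
   declarations that decide_is_function_needed above still treats as
   needed; the names are made up for the example.  */
#if 0
/* Forced output because of the "used" attribute.  */
static void keep_me (void) __attribute__ ((used));
static void keep_me (void) {}

/* Static constructors/destructors are reachable from the runtime.  */
static void init_me (void) __attribute__ ((constructor));
static void init_me (void) {}

/* A plain static function, by contrast, becomes needed only once a
   reachable caller or a taken address is discovered later.  */
static int plain_static (int x) { return x + 1; }
#endif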
256
257 /* Walk the decls we marked as necessary and see if they reference new
258 variables or functions and add them into the worklists. */
259 static bool
260 cgraph_varpool_analyze_pending_decls (void)
261 {
262 bool changed = false;
263 timevar_push (TV_CGRAPH);
264
265 while (cgraph_varpool_first_unanalyzed_node)
266 {
267 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
268
269 cgraph_varpool_first_unanalyzed_node->analyzed = true;
270
271 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
272
273 if (DECL_INITIAL (decl))
274 {
275 visited_nodes = pointer_set_create ();
276 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
277 pointer_set_destroy (visited_nodes);
278 visited_nodes = NULL;
279 }
280 changed = true;
281 }
282 timevar_pop (TV_CGRAPH);
283 return changed;
284 }
285
286 /* Optimization of function bodies might've rendered some variables
287    unnecessary, so we want to prevent these from being compiled.
288
289    This is done by pruning the queue and keeping only the variables that
290    really appear needed (i.e. they are either externally visible or referenced
291    by a compiled function).  Re-doing the reachability analysis on variables
292    brings back the remaining variables referenced by these.  */
293 static void
294 cgraph_varpool_remove_unreferenced_decls (void)
295 {
296 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
297
298 cgraph_varpool_reset_queue ();
299
300 if (errorcount || sorrycount)
301 return;
302
303 while (node)
304 {
305 tree decl = node->decl;
306 next = node->next_needed;
307 node->needed = 0;
308
309 if (node->finalized
310 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
311 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
312 || node->force_output
313 || decide_is_variable_needed (node, decl)
314 /* ??? Cgraph does not yet rule the world with an iron hand,
315 and does not control the emission of debug information.
316 After a variable has its DECL_RTL set, we must assume that
317 it may be referenced by the debug information, and we can
318 no longer elide it. */
319 || DECL_RTL_SET_P (decl)))
320 cgraph_varpool_mark_needed_node (node);
321
322 node = next;
323 }
324 /* Make sure we mark alias targets as used targets. */
325 finish_aliases_1 ();
326 cgraph_varpool_analyze_pending_decls ();
327 }
328
329
330 /* When not doing unit-at-a-time, output all functions enqueued.
331    Return true when such functions were found.  */
332
333 bool
334 cgraph_assemble_pending_functions (void)
335 {
336 bool output = false;
337
338 if (flag_unit_at_a_time)
339 return false;
340
341 while (cgraph_nodes_queue)
342 {
343 struct cgraph_node *n = cgraph_nodes_queue;
344
345 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
346 n->next_needed = NULL;
347 if (!n->global.inlined_to
348 && !n->alias
349 && !DECL_EXTERNAL (n->decl))
350 {
351 cgraph_expand_function (n);
352 output = true;
353 }
354 }
355
356 return output;
357 }
358 /* As a GCC extension we allow redefinition of the function.  The
359    semantics when the two bodies differ is not well defined.
360    We replace the old body with the new body so that in unit-at-a-time mode
361    we always use the new body, while in normal mode we may end up with
362    the old body inlined into some functions and the new body expanded and
363    inlined in others.
364
365    ??? It may make more sense to use one body for inlining and the other
366    body for expanding the function, but this is difficult to do.  */
367
368 static void
369 cgraph_reset_node (struct cgraph_node *node)
370 {
371 /* If node->output is set, then this is a unit-at-a-time compilation
372 and we have already begun whole-unit analysis. This is *not*
373 testing for whether we've already emitted the function. That
374 case can be sort-of legitimately seen with real function
375 redefinition errors. I would argue that the front end should
376 never present us with such a case, but don't enforce that for now. */
377 gcc_assert (!node->output);
378
379 /* Reset our data structures so we can analyze the function again. */
380 memset (&node->local, 0, sizeof (node->local));
381 memset (&node->global, 0, sizeof (node->global));
382 memset (&node->rtl, 0, sizeof (node->rtl));
383 node->analyzed = false;
384 node->local.redefined_extern_inline = true;
385 node->local.finalized = false;
386
387 if (!flag_unit_at_a_time)
388 {
389 struct cgraph_node *n;
390
391 for (n = cgraph_nodes; n; n = n->next)
392 if (n->global.inlined_to == node)
393 cgraph_remove_node (n);
394 }
395
396 cgraph_node_remove_callees (node);
397
398   /* We may need to re-queue the node for assembling in case
399      we already processed it and ignored it as not needed.  */
400 if (node->reachable && !flag_unit_at_a_time)
401 {
402 struct cgraph_node *n;
403
404 for (n = cgraph_nodes_queue; n; n = n->next_needed)
405 if (n == node)
406 break;
407 if (!n)
408 node->reachable = 0;
409 }
410 }
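
/* A sketch of the GNU C "extern inline" redefinition described above
   cgraph_reset_node (illustrative only; my_abs is a made-up name).  */
#if 0
/* First an inline-only definition, typically coming from a header;
   no offline copy is emitted for it.  */
extern inline int my_abs (int x) { return x < 0 ? -x : x; }

/* Later the real offline definition appears; cgraph_reset_node throws
   away the data collected for the first body so the node can be
   analyzed again with the new one.  */
int my_abs (int x) { return x < 0 ? -x : x; }
#endif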
411
412 static void
413 cgraph_lower_function (struct cgraph_node *node)
414 {
415 if (node->lowered)
416 return;
417 tree_lowering_passes (node->decl);
418 node->lowered = true;
419 }
420
421 static void
422 cgraph_finalize_pending_functions (void)
423 {
424 struct cgraph_node *next, *node = cgraph_analyze_queue;
425
426 cgraph_analyze_queue = NULL;
427 for (; node ; node = next)
428 {
429 next = node->next_needed;
430 node->next_needed = NULL;
431 cgraph_finalize_function (node->decl, true);
432 }
433 }
434
435 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
436 logic in effect. If NESTED is true, then our caller cannot stand to have
437 the garbage collector run at the moment. We would need to either create
438 a new GC context, or just not compile right now. */
439
440 void
441 cgraph_finalize_function (tree decl, bool nested)
442 {
443 struct cgraph_node *node = cgraph_node (decl);
444
445 if (node->local.finalized)
446 cgraph_reset_node (node);
447
448 notice_global_symbol (decl);
449 node->decl = decl;
450 node->local.finalized = true;
451 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
452 if (node->nested)
453 lower_nested_functions (decl);
454 gcc_assert (!node->nested);
455
456 /* If not unit at a time, then we need to create the call graph
457 now, so that called functions can be queued and emitted now. */
458 if (!flag_unit_at_a_time)
459 {
460 cgraph_analyze_function (node);
461 cgraph_finalize_pending_functions ();
462 cgraph_decide_inlining_incrementally (node, false);
463 }
464
465 if (decide_is_function_needed (node, decl))
466 cgraph_mark_needed_node (node);
467
468 /* Since we reclaim unreachable nodes at the end of every language
469 level unit, we need to be conservative about possible entry points
470 there. */
471 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
472 cgraph_mark_reachable_node (node);
473
474 /* If not unit at a time, go ahead and emit everything we've found
475 to be reachable at this time. */
476 if (!nested)
477 {
478 if (!cgraph_assemble_pending_functions ())
479 ggc_collect ();
480 }
481
482 /* If we've not yet emitted decl, tell the debug info about it. */
483 if (!TREE_ASM_WRITTEN (decl))
484 (*debug_hooks->deferred_inline_function) (decl);
485
486 /* Possibly warn about unused parameters. */
487 if (warn_unused_parameter)
488 do_warn_unused_parameter (decl);
489 }
490
491 /* Walk tree and record all calls. Called via walk_tree. */
492 static tree
493 record_reference (tree *tp, int *walk_subtrees, void *data)
494 {
495 tree t = *tp;
496
497 switch (TREE_CODE (t))
498 {
499 case VAR_DECL:
500 /* ??? Really, we should mark this decl as *potentially* referenced
501 by this function and re-examine whether the decl is actually used
502 after rtl has been generated. */
503 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
504 {
505 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
506 if (lang_hooks.callgraph.analyze_expr)
507 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
508 data);
509 }
510 break;
511
512 case FDESC_EXPR:
513 case ADDR_EXPR:
514 if (flag_unit_at_a_time)
515 {
516 /* Record dereferences to the functions. This makes the
517 functions reachable unconditionally. */
518 tree decl = TREE_OPERAND (*tp, 0);
519 if (TREE_CODE (decl) == FUNCTION_DECL)
520 cgraph_mark_needed_node (cgraph_node (decl));
521 }
522 break;
523
524 default:
525       /* Save some cycles by not walking types and declarations as we
526 won't find anything useful there anyway. */
527 if (IS_TYPE_OR_DECL_P (*tp))
528 {
529 *walk_subtrees = 0;
530 break;
531 }
532
533 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
534 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
535 break;
536 }
537
538 return NULL;
539 }
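
/* Illustrative source (a sketch, not compiled here) of the references
   record_reference above reacts to: taking the address of callee yields
   an ADDR_EXPR and makes the function reachable unconditionally, and the
   use of the file-scope static marks the variable as needed in the
   varpool.  All names are made up.  */
#if 0
static int callee (int x) { return x + 1; }
static int counters[4];

int (*get_callee (void)) (int)
{
  counters[0]++;		/* VAR_DECL reference.  */
  return &callee;		/* ADDR_EXPR of a FUNCTION_DECL.  */
}
#endif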
540
541 /* Create cgraph edges for function calls inside BODY from NODE. */
542
543 static void
544 cgraph_create_edges (struct cgraph_node *node, tree body)
545 {
546 basic_block bb;
547
548 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
549 block_stmt_iterator bsi;
550 tree step;
551 visited_nodes = pointer_set_create ();
552
553 /* Reach the trees by walking over the CFG, and note the
554 enclosing basic-blocks in the call edges. */
555 FOR_EACH_BB_FN (bb, this_cfun)
556 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
557 {
558 tree stmt = bsi_stmt (bsi);
559 tree call = get_call_expr_in (stmt);
560 tree decl;
561
562 if (call && (decl = get_callee_fndecl (call)))
563 {
564 cgraph_create_edge (node, cgraph_node (decl), stmt,
565 bb->count,
566 bb->loop_depth);
567 walk_tree (&TREE_OPERAND (call, 1),
568 record_reference, node, visited_nodes);
569 if (TREE_CODE (stmt) == MODIFY_EXPR)
570 walk_tree (&TREE_OPERAND (stmt, 0),
571 record_reference, node, visited_nodes);
572 }
573 else
574 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
575 }
576
577 /* Look for initializers of constant variables and private statics. */
578 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
579 step;
580 step = TREE_CHAIN (step))
581 {
582 tree decl = TREE_VALUE (step);
583 if (TREE_CODE (decl) == VAR_DECL
584 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
585 && flag_unit_at_a_time)
586 cgraph_varpool_finalize_decl (decl);
587 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
588 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
589 }
590
591 pointer_set_destroy (visited_nodes);
592 visited_nodes = NULL;
593 }
594
595 /* Give the initial reasons why inlining would fail.  Those get
596    either NULLified or usually overwritten by a more precise reason
597    later.  */
598 static void
599 initialize_inline_failed (struct cgraph_node *node)
600 {
601 struct cgraph_edge *e;
602
603 for (e = node->callers; e; e = e->next_caller)
604 {
605 gcc_assert (!e->callee->global.inlined_to);
606 gcc_assert (e->inline_failed);
607 if (node->local.redefined_extern_inline)
608 e->inline_failed = N_("redefined extern inline functions are not "
609 "considered for inlining");
610 else if (!node->local.inlinable)
611 e->inline_failed = N_("function not inlinable");
612 else
613 e->inline_failed = N_("function not considered for inlining");
614 }
615 }
616
617 /* Rebuild call edges from the current function after passes that are not
618    aware of cgraph updating.  */
619 static void
620 rebuild_cgraph_edges (void)
621 {
622 basic_block bb;
623 struct cgraph_node *node = cgraph_node (current_function_decl);
624 block_stmt_iterator bsi;
625
626 cgraph_node_remove_callees (node);
627
628 node->count = ENTRY_BLOCK_PTR->count;
629
630 FOR_EACH_BB (bb)
631 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
632 {
633 tree stmt = bsi_stmt (bsi);
634 tree call = get_call_expr_in (stmt);
635 tree decl;
636
637 if (call && (decl = get_callee_fndecl (call)))
638 cgraph_create_edge (node, cgraph_node (decl), stmt,
639 bb->count,
640 bb->loop_depth);
641 }
642 initialize_inline_failed (node);
643 gcc_assert (!node->global.inlined_to);
644 }
645
646 struct tree_opt_pass pass_rebuild_cgraph_edges =
647 {
648 NULL, /* name */
649 NULL, /* gate */
650 rebuild_cgraph_edges, /* execute */
651 NULL, /* sub */
652 NULL, /* next */
653 0, /* static_pass_number */
654 0, /* tv_id */
655 PROP_cfg, /* properties_required */
656 0, /* properties_provided */
657 0, /* properties_destroyed */
658 0, /* todo_flags_start */
659 0, /* todo_flags_finish */
660 0 /* letter */
661 };
662
663 /* Verify consistency of the cgraph node NODE and its edges.  */
664 void
665 verify_cgraph_node (struct cgraph_node *node)
666 {
667 struct cgraph_edge *e;
668 struct cgraph_node *main_clone;
669 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
670 basic_block this_block;
671 block_stmt_iterator bsi;
672 bool error_found = false;
673
674 timevar_push (TV_CGRAPH_VERIFY);
675 for (e = node->callees; e; e = e->next_callee)
676 if (e->aux)
677 {
678 error ("aux field set for edge %s->%s",
679 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
680 error_found = true;
681 }
682 if (node->count < 0)
683 {
684 error ("Execution count is negative");
685 error_found = true;
686 }
687 for (e = node->callers; e; e = e->next_caller)
688 {
689 if (e->count < 0)
690 {
691 error ("caller edge count is negative");
692 error_found = true;
693 }
694 if (!e->inline_failed)
695 {
696 if (node->global.inlined_to
697 != (e->caller->global.inlined_to
698 ? e->caller->global.inlined_to : e->caller))
699 {
700 error ("inlined_to pointer is wrong");
701 error_found = true;
702 }
703 if (node->callers->next_caller)
704 {
705 error ("multiple inline callers");
706 error_found = true;
707 }
708 }
709 else
710 if (node->global.inlined_to)
711 {
712 error ("inlined_to pointer set for noninline callers");
713 error_found = true;
714 }
715 }
716 if (!node->callers && node->global.inlined_to)
717 {
718 error ("inlined_to pointer is set but no predecesors found");
719 error_found = true;
720 }
721 if (node->global.inlined_to == node)
722 {
723 error ("inlined_to pointer refers to itself");
724 error_found = true;
725 }
726
727 for (main_clone = cgraph_node (node->decl); main_clone;
728 main_clone = main_clone->next_clone)
729 if (main_clone == node)
730 break;
731   if (!main_clone)
732 {
733 error ("node not found in DECL_ASSEMBLER_NAME hash");
734 error_found = true;
735 }
736
737 if (node->analyzed
738 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
739 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
740 {
741 if (this_cfun->cfg)
742 {
743 /* The nodes we're interested in are never shared, so walk
744 the tree ignoring duplicates. */
745 visited_nodes = pointer_set_create ();
746 /* Reach the trees by walking over the CFG, and note the
747 enclosing basic-blocks in the call edges. */
748 FOR_EACH_BB_FN (this_block, this_cfun)
749 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
750 {
751 tree stmt = bsi_stmt (bsi);
752 tree call = get_call_expr_in (stmt);
753 tree decl;
754 if (call && (decl = get_callee_fndecl (call)))
755 {
756 struct cgraph_edge *e = cgraph_edge (node, stmt);
757 if (e)
758 {
759 if (e->aux)
760 {
761 error ("shared call_stmt:");
762 debug_generic_stmt (stmt);
763 error_found = true;
764 }
765 if (e->callee->decl != cgraph_node (decl)->decl
766 && e->inline_failed)
767 {
768 error ("edge points to wrong declaration:");
769 debug_tree (e->callee->decl);
770 fprintf (stderr," Instead of:");
771 debug_tree (decl);
772 }
773 e->aux = (void *)1;
774 }
775 else
776 {
777 error ("missing callgraph edge for call stmt:");
778 debug_generic_stmt (stmt);
779 error_found = true;
780 }
781 }
782 }
783 pointer_set_destroy (visited_nodes);
784 visited_nodes = NULL;
785 }
786 else
787 /* No CFG available?! */
788 gcc_unreachable ();
789
790 for (e = node->callees; e; e = e->next_callee)
791 {
792 if (!e->aux)
793 {
794 error ("edge %s->%s has no corresponding call_stmt",
795 cgraph_node_name (e->caller),
796 cgraph_node_name (e->callee));
797 debug_generic_stmt (e->call_stmt);
798 error_found = true;
799 }
800 e->aux = 0;
801 }
802 }
803 if (error_found)
804 {
805 dump_cgraph_node (stderr, node);
806 internal_error ("verify_cgraph_node failed");
807 }
808 timevar_pop (TV_CGRAPH_VERIFY);
809 }
810
811 /* Verify whole cgraph structure. */
812 void
813 verify_cgraph (void)
814 {
815 struct cgraph_node *node;
816
817 if (sorrycount || errorcount)
818 return;
819
820 for (node = cgraph_nodes; node; node = node->next)
821 verify_cgraph_node (node);
822 }
823
824 /* Output one variable, if necessary. Return whether we output it. */
825 static bool
826 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
827 {
828 tree decl = node->decl;
829
830 if (!TREE_ASM_WRITTEN (decl)
831 && !node->alias
832 && !DECL_EXTERNAL (decl)
833 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
834 {
835 assemble_variable (decl, 0, 1, 0);
836 /* Local static variables are never seen by check_global_declarations
837 so we need to output debug info by hand. */
838 if (DECL_CONTEXT (decl)
839 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
840 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
841 && errorcount == 0 && sorrycount == 0)
842 {
843 timevar_push (TV_SYMOUT);
844 (*debug_hooks->global_decl) (decl);
845 timevar_pop (TV_SYMOUT);
846 }
847 return true;
848 }
849
850 return false;
851 }
852
853 /* Output all variables enqueued to be assembled. */
854 bool
855 cgraph_varpool_assemble_pending_decls (void)
856 {
857 bool changed = false;
858
859 if (errorcount || sorrycount)
860 return false;
861
862 /* EH might mark decls as needed during expansion. This should be safe since
863      we don't create references to new functions, but it should not be used
864 elsewhere. */
865 cgraph_varpool_analyze_pending_decls ();
866
867 while (cgraph_varpool_nodes_queue)
868 {
869 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
870
871 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
872 if (cgraph_varpool_assemble_decl (node))
873 changed = true;
874 node->next_needed = NULL;
875 }
876 return changed;
877 }
878
879 /* Output all asm statements we have stored up to be output. */
880
881 static void
882 cgraph_output_pending_asms (void)
883 {
884 struct cgraph_asm_node *can;
885
886 if (errorcount || sorrycount)
887 return;
888
889 for (can = cgraph_asm_nodes; can; can = can->next)
890 assemble_asm (can->asm_str);
891 cgraph_asm_nodes = NULL;
892 }
893
894 /* Analyze the function scheduled to be output. */
895 void
896 cgraph_analyze_function (struct cgraph_node *node)
897 {
898 tree decl = node->decl;
899
900 current_function_decl = decl;
901 push_cfun (DECL_STRUCT_FUNCTION (decl));
902 cgraph_lower_function (node);
903
904 /* First kill forward declaration so reverse inlining works properly. */
905 cgraph_create_edges (node, decl);
906
907 node->local.inlinable = tree_inlinable_function_p (decl);
908 node->local.self_insns = estimate_num_insns (decl);
909 if (node->local.inlinable)
910 node->local.disregard_inline_limits
911 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
912 initialize_inline_failed (node);
913 if (flag_really_no_inline && !node->local.disregard_inline_limits)
914 node->local.inlinable = 0;
915   /* Inlining characteristics are maintained by cgraph_mark_inline.  */
916 node->global.insns = node->local.self_insns;
917
918 node->analyzed = true;
919 pop_cfun ();
920 current_function_decl = NULL;
921 }
922
923 /* Analyze the whole compilation unit once it is parsed completely. */
924
925 void
926 cgraph_finalize_compilation_unit (void)
927 {
928 struct cgraph_node *node;
929 /* Keep track of already processed nodes when called multiple times for
930 intermodule optimization. */
931 static struct cgraph_node *first_analyzed;
932
933 finish_aliases_1 ();
934
935 if (!flag_unit_at_a_time)
936 {
937 cgraph_output_pending_asms ();
938 cgraph_assemble_pending_functions ();
939 return;
940 }
941
942 if (!quiet_flag)
943 {
944 fprintf (stderr, "\nAnalyzing compilation unit");
945 fflush (stderr);
946 }
947
948 timevar_push (TV_CGRAPH);
949 cgraph_varpool_analyze_pending_decls ();
950 if (cgraph_dump_file)
951 {
952 fprintf (cgraph_dump_file, "Initial entry points:");
953 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
954 if (node->needed && DECL_SAVED_TREE (node->decl))
955 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
956 fprintf (cgraph_dump_file, "\n");
957 }
958
959 /* Propagate reachability flag and lower representation of all reachable
960 functions. In the future, lowering will introduce new functions and
961 new entry points on the way (by template instantiation and virtual
962 method table generation for instance). */
963 while (cgraph_nodes_queue)
964 {
965 struct cgraph_edge *edge;
966 tree decl = cgraph_nodes_queue->decl;
967
968 node = cgraph_nodes_queue;
969 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
970 node->next_needed = NULL;
971
972       /* ??? It is possible to create an extern inline function and later
973          use the weak alias attribute to kill its body.  See
974          gcc.c-torture/compile/20011119-1.c  */
975 if (!DECL_SAVED_TREE (decl))
976 {
977 cgraph_reset_node (node);
978 continue;
979 }
980
981 gcc_assert (!node->analyzed && node->reachable);
982 gcc_assert (DECL_SAVED_TREE (decl));
983
984 cgraph_analyze_function (node);
985 cgraph_finalize_pending_functions ();
986
987 for (edge = node->callees; edge; edge = edge->next_callee)
988 if (!edge->callee->reachable)
989 cgraph_mark_reachable_node (edge->callee);
990
991 cgraph_varpool_analyze_pending_decls ();
992 }
993
994 /* Collect entry points to the unit. */
995
996 if (cgraph_dump_file)
997 {
998 fprintf (cgraph_dump_file, "Unit entry points:");
999 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1000 if (node->needed && DECL_SAVED_TREE (node->decl))
1001 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1002 fprintf (cgraph_dump_file, "\n\nInitial ");
1003 dump_cgraph (cgraph_dump_file);
1004 }
1005
1006 if (cgraph_dump_file)
1007 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1008
1009 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1010 {
1011 tree decl = node->decl;
1012
1013 if (node->local.finalized && !DECL_SAVED_TREE (decl))
1014 cgraph_reset_node (node);
1015
1016 if (!node->reachable && DECL_SAVED_TREE (decl))
1017 {
1018 if (cgraph_dump_file)
1019 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1020 cgraph_remove_node (node);
1021 continue;
1022 }
1023 else
1024 node->next_needed = NULL;
1025 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1026 gcc_assert (node->analyzed == node->local.finalized);
1027 }
1028 if (cgraph_dump_file)
1029 {
1030 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1031 dump_cgraph (cgraph_dump_file);
1032 }
1033 first_analyzed = cgraph_nodes;
1034 ggc_collect ();
1035 timevar_pop (TV_CGRAPH);
1036 }
1037 /* Figure out what functions we want to assemble. */
1038
1039 static void
1040 cgraph_mark_functions_to_output (void)
1041 {
1042 struct cgraph_node *node;
1043
1044 for (node = cgraph_nodes; node; node = node->next)
1045 {
1046 tree decl = node->decl;
1047 struct cgraph_edge *e;
1048
1049 gcc_assert (!node->output);
1050
1051 for (e = node->callers; e; e = e->next_caller)
1052 if (e->inline_failed)
1053 break;
1054
1055 /* We need to output all local functions that are used and not
1056 always inlined, as well as those that are reachable from
1057 outside the current compilation unit. */
1058 if (DECL_SAVED_TREE (decl)
1059 && !node->global.inlined_to
1060 && (node->needed
1061 || (e && node->reachable))
1062 && !TREE_ASM_WRITTEN (decl)
1063 && !DECL_EXTERNAL (decl))
1064 node->output = 1;
1065 else
1066 {
1067 /* We should've reclaimed all functions that are not needed. */
1068 #ifdef ENABLE_CHECKING
1069 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1070 && !DECL_EXTERNAL (decl))
1071 {
1072 dump_cgraph_node (stderr, node);
1073 internal_error ("failed to reclaim unneeded function");
1074 }
1075 #endif
1076 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1077 || DECL_EXTERNAL (decl));
1078
1079 }
1080
1081 }
1082 }
1083
1084 /* Expand function specified by NODE. */
1085
1086 static void
1087 cgraph_expand_function (struct cgraph_node *node)
1088 {
1089 tree decl = node->decl;
1090
1091 /* We ought to not compile any inline clones. */
1092 gcc_assert (!node->global.inlined_to);
1093
1094 if (flag_unit_at_a_time)
1095 announce_function (decl);
1096
1097 cgraph_lower_function (node);
1098
1099 /* Generate RTL for the body of DECL. */
1100 lang_hooks.callgraph.expand_function (decl);
1101
1102   /* Make sure that the back end didn't give up on compiling.  */
1103   /* ??? This can happen with a nested function of an extern inline function.  */
1104 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1105
1106 current_function_decl = NULL;
1107 if (!cgraph_preserve_function_body_p (node->decl))
1108 {
1109 DECL_SAVED_TREE (node->decl) = NULL;
1110 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1111 DECL_INITIAL (node->decl) = error_mark_node;
1112 /* Eliminate all call edges. This is important so the call_expr no longer
1113 points to the dead function body. */
1114 cgraph_node_remove_callees (node);
1115 }
1116
1117 cgraph_function_flags_ready = true;
1118 }
1119
1120 /* Return true when the call on edge E will be inlined; otherwise *REASON tells why not.  */
1121
1122 bool
1123 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1124 {
1125 *reason = e->inline_failed;
1126 return !e->inline_failed;
1127 }
1128
1129
1130
1131 /* Expand all functions that must be output.
1132
1133    Attempt to topologically sort the nodes so a function is output when
1134    all of its callees have already been assembled, allowing data to be
1135    propagated across the callgraph.  Use a stack to get a smaller distance
1136 between a function and its callees (later we may choose to use a more
1137 sophisticated algorithm for function reordering; we will likely want
1138 to use subsections to make the output functions appear in top-down
1139 order). */
1140
1141 static void
1142 cgraph_expand_all_functions (void)
1143 {
1144 struct cgraph_node *node;
1145 struct cgraph_node **order =
1146 xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
1147 int order_pos = 0, new_order_pos = 0;
1148 int i;
1149
1150 order_pos = cgraph_postorder (order);
1151 gcc_assert (order_pos == cgraph_n_nodes);
1152
1153   /* The garbage collector may remove the inline clones we eliminate during
1154      optimization, so we must be sure not to reference them.  */
1155 for (i = 0; i < order_pos; i++)
1156 if (order[i]->output)
1157 order[new_order_pos++] = order[i];
1158
1159 for (i = new_order_pos - 1; i >= 0; i--)
1160 {
1161 node = order[i];
1162 if (node->output)
1163 {
1164 gcc_assert (node->reachable);
1165 node->output = 0;
1166 cgraph_expand_function (node);
1167 }
1168 }
1169 free (order);
1170 }
1171
1172 /* This is used to sort the node types by the cgraph order number. */
1173
1174 struct cgraph_order_sort
1175 {
1176 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1177 union
1178 {
1179 struct cgraph_node *f;
1180 struct cgraph_varpool_node *v;
1181 struct cgraph_asm_node *a;
1182 } u;
1183 };
1184
1185 /* Output all functions, variables, and asm statements in the order
1186 according to their order fields, which is the order in which they
1187 appeared in the file. This implements -fno-toplevel-reorder. In
1188 this mode we may output functions and variables which don't really
1189 need to be output. */
1190
1191 static void
1192 cgraph_output_in_order (void)
1193 {
1194 int max;
1195 size_t size;
1196 struct cgraph_order_sort *nodes;
1197 int i;
1198 struct cgraph_node *pf;
1199 struct cgraph_varpool_node *pv;
1200 struct cgraph_asm_node *pa;
1201
1202 max = cgraph_order;
1203 size = max * sizeof (struct cgraph_order_sort);
1204 nodes = (struct cgraph_order_sort *) alloca (size);
1205 memset (nodes, 0, size);
1206
1207 cgraph_varpool_analyze_pending_decls ();
1208
1209 for (pf = cgraph_nodes; pf; pf = pf->next)
1210 {
1211 if (pf->output)
1212 {
1213 i = pf->order;
1214 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1215 nodes[i].kind = ORDER_FUNCTION;
1216 nodes[i].u.f = pf;
1217 }
1218 }
1219
1220 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1221 {
1222 i = pv->order;
1223 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1224 nodes[i].kind = ORDER_VAR;
1225 nodes[i].u.v = pv;
1226 }
1227
1228 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1229 {
1230 i = pa->order;
1231 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1232 nodes[i].kind = ORDER_ASM;
1233 nodes[i].u.a = pa;
1234 }
1235 cgraph_asm_nodes = NULL;
1236
1237 for (i = 0; i < max; ++i)
1238 {
1239 switch (nodes[i].kind)
1240 {
1241 case ORDER_FUNCTION:
1242 nodes[i].u.f->output = 0;
1243 cgraph_expand_function (nodes[i].u.f);
1244 break;
1245
1246 case ORDER_VAR:
1247 cgraph_varpool_assemble_decl (nodes[i].u.v);
1248 break;
1249
1250 case ORDER_ASM:
1251 assemble_asm (nodes[i].u.a->asm_str);
1252 break;
1253
1254 case ORDER_UNDEFINED:
1255 break;
1256
1257 default:
1258 gcc_unreachable ();
1259 }
1260 }
1261 }
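
/* A contrived sketch (illustrative only) of the kind of source
   -fno-toplevel-reorder is meant to preserve: the top-level asm only
   makes sense if it is emitted between the two definitions, so
   cgraph_output_in_order emits functions, variables and asm statements
   strictly in their original order.  */
#if 0
int first_var = 1;

/* Must end up in the assembly output after first_var and before
   second_var.  */
asm (".balign 16");

int second_var = 2;
#endif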
1262
1263 /* Mark visibility of all functions.
1264
1265 A local function is one whose calls can occur only in the current
1266 compilation unit and all its calls are explicit, so we can change
1267 its calling convention. We simply mark all static functions whose
1268 address is not taken as local.
1269
1270    We also change the TREE_PUBLIC flag of all declarations that are public
1271    from the language's point of view but whose default we want to override,
1272    from the back end's point of view, via visibilities.  */
1273
1274 static void
1275 cgraph_function_and_variable_visibility (void)
1276 {
1277 struct cgraph_node *node;
1278 struct cgraph_varpool_node *vnode;
1279
1280 for (node = cgraph_nodes; node; node = node->next)
1281 {
1282 if (node->reachable
1283 && (DECL_COMDAT (node->decl)
1284 || (!flag_whole_program
1285 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1286 node->local.externally_visible = true;
1287 if (!node->local.externally_visible && node->analyzed
1288 && !DECL_EXTERNAL (node->decl))
1289 {
1290 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1291 TREE_PUBLIC (node->decl) = 0;
1292 }
1293 node->local.local = (!node->needed
1294 && node->analyzed
1295 && !DECL_EXTERNAL (node->decl)
1296 && !node->local.externally_visible);
1297 }
1298 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1299 {
1300 if (vnode->needed
1301 && !flag_whole_program
1302 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1303 vnode->externally_visible = 1;
1304 if (!vnode->externally_visible)
1305 {
1306 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1307 TREE_PUBLIC (vnode->decl) = 0;
1308 }
1309 gcc_assert (TREE_STATIC (vnode->decl));
1310 }
1311
1312 /* Because we have to be conservative on the boundaries of source
1313      level units, it is possible that we marked some functions as
1314      reachable just because they might be used later via external
1315 linkage, but after making them local they are really unreachable
1316 now. */
1317 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1318
1319 if (cgraph_dump_file)
1320 {
1321 fprintf (cgraph_dump_file, "\nMarking local functions:");
1322 for (node = cgraph_nodes; node; node = node->next)
1323 if (node->local.local)
1324 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1325 fprintf (cgraph_dump_file, "\n\n");
1326 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1327 for (node = cgraph_nodes; node; node = node->next)
1328 if (node->local.externally_visible)
1329 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1330 fprintf (cgraph_dump_file, "\n\n");
1331 }
1332 cgraph_function_flags_ready = true;
1333 }
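
/* Source-level illustration (a sketch, not compiled here) of the "local"
   property computed above: f is static and its address is never taken,
   so every call is explicit and its calling convention may be changed;
   g escapes through a function pointer and therefore stays non-local.
   The names are made up.  */
#if 0
static int f (int x) { return x * 2; }
static int g (int x) { return x * 3; }

int (*escaped) (int) = g;

int
use_them (int x)
{
  return f (x) + escaped (x);
}
#endif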
1334
1335 /* Return true when the function body of DECL still needs to be kept around
1336 for later re-use. */
1337 bool
1338 cgraph_preserve_function_body_p (tree decl)
1339 {
1340 struct cgraph_node *node;
1341 if (!cgraph_global_info_ready)
1342 return (DECL_INLINE (decl) && !flag_really_no_inline);
1343 /* Look if there is any clone around. */
1344 for (node = cgraph_node (decl); node; node = node->next_clone)
1345 if (node->global.inlined_to)
1346 return true;
1347 return false;
1348 }
1349
1350 static void
1351 ipa_passes (void)
1352 {
1353 cfun = NULL;
1354 tree_register_cfg_hooks ();
1355 bitmap_obstack_initialize (NULL);
1356 execute_ipa_pass_list (all_ipa_passes);
1357 bitmap_obstack_release (NULL);
1358 }
1359
1360 /* Perform simple optimizations based on callgraph. */
1361
1362 void
1363 cgraph_optimize (void)
1364 {
1365 #ifdef ENABLE_CHECKING
1366 verify_cgraph ();
1367 #endif
1368 if (!flag_unit_at_a_time)
1369 {
1370 cgraph_output_pending_asms ();
1371 cgraph_varpool_assemble_pending_decls ();
1372 return;
1373 }
1374
1375 process_pending_assemble_externals ();
1376
1377   /* The front end may output common variables after the unit has been finalized.
1378      It is safe to deal with them here as they are always zero initialized.  */
1379 cgraph_varpool_analyze_pending_decls ();
1380
1381 timevar_push (TV_CGRAPHOPT);
1382 if (!quiet_flag)
1383     fprintf (stderr, "Performing interprocedural optimizations\n");
1384
1385 cgraph_function_and_variable_visibility ();
1386 if (cgraph_dump_file)
1387 {
1388 fprintf (cgraph_dump_file, "Marked ");
1389 dump_cgraph (cgraph_dump_file);
1390 }
1391 ipa_passes ();
1392   /* This pass removes the bodies of extern inline functions we never inlined.
1393      Do this late so other IPA passes see what is really going on.  */
1394 cgraph_remove_unreachable_nodes (false, dump_file);
1395 cgraph_global_info_ready = true;
1396 if (cgraph_dump_file)
1397 {
1398 fprintf (cgraph_dump_file, "Optimized ");
1399 dump_cgraph (cgraph_dump_file);
1400 dump_varpool (cgraph_dump_file);
1401 }
1402 timevar_pop (TV_CGRAPHOPT);
1403
1404 /* Output everything. */
1405 if (!quiet_flag)
1406 fprintf (stderr, "Assembling functions:\n");
1407 #ifdef ENABLE_CHECKING
1408 verify_cgraph ();
1409 #endif
1410
1411 cgraph_mark_functions_to_output ();
1412
1413 if (!flag_toplevel_reorder)
1414 cgraph_output_in_order ();
1415 else
1416 {
1417 cgraph_output_pending_asms ();
1418
1419 cgraph_expand_all_functions ();
1420 cgraph_varpool_remove_unreferenced_decls ();
1421
1422 cgraph_varpool_assemble_pending_decls ();
1423 }
1424
1425 if (cgraph_dump_file)
1426 {
1427 fprintf (cgraph_dump_file, "\nFinal ");
1428 dump_cgraph (cgraph_dump_file);
1429 }
1430 #ifdef ENABLE_CHECKING
1431 verify_cgraph ();
1432 /* Double check that all inline clones are gone and that all
1433 function bodies have been released from memory. */
1434 if (flag_unit_at_a_time
1435 && !dump_enabled_p (TDI_tree_all)
1436 && !(sorrycount || errorcount))
1437 {
1438 struct cgraph_node *node;
1439 bool error_found = false;
1440
1441 for (node = cgraph_nodes; node; node = node->next)
1442 if (node->analyzed
1443 && (node->global.inlined_to
1444 || DECL_SAVED_TREE (node->decl)))
1445 {
1446 error_found = true;
1447 dump_cgraph_node (stderr, node);
1448 }
1449 if (error_found)
1450 internal_error ("nodes with no released memory found");
1451 }
1452 #endif
1453 }
1454
1455 /* Generate and emit a static constructor or destructor. WHICH must be
1456 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1457 GENERIC statements. */
1458
1459 void
1460 cgraph_build_static_cdtor (char which, tree body, int priority)
1461 {
1462 static int counter = 0;
1463 char which_buf[16];
1464 tree decl, name, resdecl;
1465
1466 sprintf (which_buf, "%c_%d", which, counter++);
1467 name = get_file_function_name_long (which_buf);
1468
1469 decl = build_decl (FUNCTION_DECL, name,
1470 build_function_type (void_type_node, void_list_node));
1471 current_function_decl = decl;
1472
1473 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1474 DECL_ARTIFICIAL (resdecl) = 1;
1475 DECL_IGNORED_P (resdecl) = 1;
1476 DECL_RESULT (decl) = resdecl;
1477
1478 allocate_struct_function (decl);
1479
1480 TREE_STATIC (decl) = 1;
1481 TREE_USED (decl) = 1;
1482 DECL_ARTIFICIAL (decl) = 1;
1483 DECL_IGNORED_P (decl) = 1;
1484 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1485 DECL_SAVED_TREE (decl) = body;
1486 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1487 DECL_UNINLINABLE (decl) = 1;
1488
1489 DECL_INITIAL (decl) = make_node (BLOCK);
1490 TREE_USED (DECL_INITIAL (decl)) = 1;
1491
1492 DECL_SOURCE_LOCATION (decl) = input_location;
1493 cfun->function_end_locus = input_location;
1494
1495 switch (which)
1496 {
1497 case 'I':
1498 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1499 break;
1500 case 'D':
1501 DECL_STATIC_DESTRUCTOR (decl) = 1;
1502 break;
1503 default:
1504 gcc_unreachable ();
1505 }
1506
1507 gimplify_function_tree (decl);
1508
1509 /* ??? We will get called LATE in the compilation process. */
1510 if (cgraph_global_info_ready)
1511 {
1512 tree_lowering_passes (decl);
1513 tree_rest_of_compilation (decl);
1514 }
1515 else
1516 cgraph_finalize_function (decl, 0);
1517
1518 if (targetm.have_ctors_dtors)
1519 {
1520 void (*fn) (rtx, int);
1521
1522 if (which == 'I')
1523 fn = targetm.asm_out.constructor;
1524 else
1525 fn = targetm.asm_out.destructor;
1526 fn (XEXP (DECL_RTL (decl), 0), priority);
1527 }
1528 }
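
/* A sketch of a typical caller of cgraph_build_static_cdtor above
   (illustrative only; register_me is assumed to be a FUNCTION_DECL the
   caller has already built, and the helpers used are the ordinary
   statement-list and call-building routines).  */
#if 0
static void
emit_registration_ctor (tree register_me)
{
  tree body = alloc_stmt_list ();
  tree call = build_function_call_expr (register_me, NULL_TREE);

  append_to_statement_list (call, &body);
  /* 'I' emits a constructor; 'D' would emit a destructor.  */
  cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
}
#endif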
1529
1530 void
1531 init_cgraph (void)
1532 {
1533 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1534 }
1535
1536 /* The edges representing the callers of the NEW_VERSION node were
1537    fixed by cgraph_function_versioning (); now the call_expr in each of their
1538    call statements should be updated to call the NEW_VERSION.  */
1539
1540 static void
1541 update_call_expr (struct cgraph_node *new_version)
1542 {
1543 struct cgraph_edge *e;
1544
1545 gcc_assert (new_version);
1546 for (e = new_version->callers; e; e = e->next_caller)
1547 /* Update the call expr on the edges
1548 to call the new version. */
1549 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1550 }
1551
1552
1553 /* Create a new cgraph node which is the new version of
1554    OLD_VERSION node.  REDIRECT_CALLERS holds the caller
1555    edges which should be redirected to point to
1556    NEW_VERSION.  All the callee edges of OLD_VERSION
1557    are cloned to the new version node.  Return the new
1558 version node. */
1559
1560 static struct cgraph_node *
1561 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1562 tree new_decl, varray_type redirect_callers)
1563 {
1564 struct cgraph_node *new_version;
1565 struct cgraph_edge *e, *new_e;
1566 struct cgraph_edge *next_callee;
1567 unsigned i;
1568
1569 gcc_assert (old_version);
1570
1571 new_version = cgraph_node (new_decl);
1572
1573 new_version->analyzed = true;
1574 new_version->local = old_version->local;
1575 new_version->global = old_version->global;
1576   new_version->rtl = old_version->rtl;
1577 new_version->reachable = true;
1578 new_version->count = old_version->count;
1579
1580   /* Clone the old node's callees.  Recursive calls are
1581      also cloned.  */
1582 for (e = old_version->callees;e; e=e->next_callee)
1583 {
1584 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1585 new_e->count = e->count;
1586 }
1587 /* Fix recursive calls.
1588 If OLD_VERSION has a recursive call after the
1589 previous edge cloning, the new version will have an edge
1590      pointing to the old version, which is wrong.
1591      Redirect it to point to the new version.  */
1592 for (e = new_version->callees ; e; e = next_callee)
1593 {
1594 next_callee = e->next_callee;
1595 if (e->callee == old_version)
1596 cgraph_redirect_edge_callee (e, new_version);
1597
1598 if (!next_callee)
1599 break;
1600 }
1601 if (redirect_callers)
1602 for (i = 0; i < VARRAY_ACTIVE_SIZE (redirect_callers); i++)
1603 {
1604 e = VARRAY_GENERIC_PTR (redirect_callers, i);
1605 /* Redirect calls to the old version node
1606          to point to its new version.  */
1607 cgraph_redirect_edge_callee (e, new_version);
1608 }
1609
1610 return new_version;
1611 }
1612
1613 /* Perform function versioning.
1614 Function versioning includes copying of the tree and
1615 a callgraph update (creating a new cgraph node and updating
1616 its callees and callers).
1617
1618    The REDIRECT_CALLERS varray includes the edges to be redirected
1619 to the new version.
1620
1621 TREE_MAP is a mapping of tree nodes we want to replace with
1622 new ones (according to results of prior analysis).
1623 OLD_VERSION_NODE is the node that is versioned.
1624 It returns the new version's cgraph node. */
1625
1626 struct cgraph_node *
1627 cgraph_function_versioning (struct cgraph_node *old_version_node,
1628 varray_type redirect_callers,
1629 varray_type tree_map)
1630 {
1631 tree old_decl = old_version_node->decl;
1632 struct cgraph_node *new_version_node = NULL;
1633 tree new_decl;
1634
1635 if (!tree_versionable_function_p (old_decl))
1636 return NULL;
1637
1638 /* Make a new FUNCTION_DECL tree node for the
1639 new version. */
1640 new_decl = copy_node (old_decl);
1641
1642   /* Create the new version's call-graph node
1643      and update the edges of the new node.  */
1644 new_version_node =
1645 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1646 redirect_callers);
1647
1648 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1649 tree_function_versioning (old_decl, new_decl, tree_map, false);
1650 /* Update the call_expr on the edges to call the new version node. */
1651 update_call_expr (new_version_node);
1652
1653 /* Update the new version's properties.
1654      Make the new version visible only within this translation unit.
1655 ??? We cannot use COMDAT linkage because there is no
1656 ABI support for this. */
1657 DECL_EXTERNAL (new_version_node->decl) = 0;
1658 DECL_ONE_ONLY (new_version_node->decl) = 0;
1659 TREE_PUBLIC (new_version_node->decl) = 0;
1660 DECL_COMDAT (new_version_node->decl) = 0;
1661 new_version_node->local.externally_visible = 0;
1662 new_version_node->local.local = 1;
1663 new_version_node->lowered = true;
1664 return new_version_node;
1665 }
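
/* A sketch (illustrative only) of a caller of cgraph_function_versioning
   above: it clones FN_DECL and redirects a single existing call edge E to
   the new version; no tree replacements are requested, so TREE_MAP is
   left empty.  The wrapper name is made up.  */
#if 0
static struct cgraph_node *
make_redirected_version (tree fn_decl, struct cgraph_edge *e)
{
  struct cgraph_node *old_node = cgraph_node (fn_decl);
  varray_type redirect_callers;

  VARRAY_GENERIC_PTR_INIT (redirect_callers, 1, "redirect_callers");
  VARRAY_PUSH_GENERIC_PTR (redirect_callers, e);

  return cgraph_function_versioning (old_node, redirect_callers,
				     /*tree_map=*/NULL);
}
#endif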
1666
1667 /* Produce a separate function body for inline clones so the offline copy can
1668    be modified without affecting them.  */
1669 struct cgraph_node *
1670 save_inline_function_body (struct cgraph_node *node)
1671 {
1672 struct cgraph_node *first_clone;
1673
1674 gcc_assert (node == cgraph_node (node->decl));
1675
1676 cgraph_lower_function (node);
1677
1678   /* In non-unit-at-a-time mode we construct a full fledged clone that we never
1679      output to the assembly file.  This clone is pointed to by the inline_decl of
1680      the original function and the inlining infrastructure knows how to deal with it.  */
1681 if (!flag_unit_at_a_time)
1682 {
1683 struct cgraph_edge *e;
1684
1685 first_clone = cgraph_clone_node (node, node->count, 0, false);
1686 first_clone->needed = 0;
1687 first_clone->reachable = 1;
1688 /* Recursively clone all bodies. */
1689 for (e = first_clone->callees; e; e = e->next_callee)
1690 if (!e->inline_failed)
1691 cgraph_clone_inlined_nodes (e, true, false);
1692 }
1693 else
1694 first_clone = node->next_clone;
1695
1696 first_clone->decl = copy_node (node->decl);
1697 node->next_clone = NULL;
1698 if (!flag_unit_at_a_time)
1699 node->inline_decl = first_clone->decl;
1700 first_clone->prev_clone = NULL;
1701 cgraph_insert_node_to_hashtable (first_clone);
1702 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1703
1704 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1705 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1706
1707 DECL_EXTERNAL (first_clone->decl) = 0;
1708 DECL_ONE_ONLY (first_clone->decl) = 0;
1709 TREE_PUBLIC (first_clone->decl) = 0;
1710 DECL_COMDAT (first_clone->decl) = 0;
1711
1712 for (node = first_clone->next_clone; node; node = node->next_clone)
1713 node->decl = first_clone->decl;
1714 #ifdef ENABLE_CHECKING
1715 verify_cgraph_node (first_clone);
1716 #endif
1717 return first_clone;
1718 }
1719