1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23    as a few basic interprocedural optimizers.
24 
25    The main purpose of this file is to act as an interface between the
26    tree-based front ends and the back end (and the middle end).
27 
28    The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32      This function is called once the front end has parsed the function body
33      and it is certain that neither the body nor the declaration will change.
34 
35      (There is one exception needed for implementing GCC extern inline
36      functions.)
37 
38    - varpool_finalize_decl
39 
40      This function has the same behavior as the above but is used for static
41      variables.
42
43 - cgraph_finalize_compilation_unit
44
45    This function is called once the (source-level) compilation unit is
46    finalized and it will no longer change.
47 
48    In unit-at-a-time mode the call-graph construction and local function
49    analysis take place here.  Bodies of unreachable functions are released
50    to conserve memory usage.
51 
52    The function can be called multiple times when multiple source-level
53    compilation units are combined (as happens in the C front end).
54 
55 - cgraph_optimize
56
57    In unit-at-a-time compilation the intraprocedural analysis takes
58    place here.  In particular, static functions whose address is never
59    taken are marked as local.  The back end can then use this information to
60    modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65    When a function or variable is referenced in some hidden way, the
66    call-graph data structure must be updated accordingly by these functions.
67    There should be little need to call them, and all references should be
68    made explicit to the cgraph code.  At present these functions are
69    used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73      This function is responsible for lowering tree nodes not understood by
74      generic code into understandable ones, or alternatively for marking
75      the callgraph and varpool nodes referenced by the expression as needed.
76 
77      ??? On tree-ssa, genericizing should take place here and we would avoid
78      the need for these hooks (replacing them by a genericizing hook).
79
80 - expand_function callback
81
82      This function is used to expand a function and pass it to the RTL
83      back end.  The front end should not make any assumptions about when
84      this function can be called.  In particular cgraph_assemble_pending_functions,
85      varpool_assemble_pending_decls, cgraph_finalize_function,
86      varpool_finalize_decl and cgraph_optimize can cause arbitrary
87      previously finalized functions to be expanded.
88
89 We implement two compilation modes.
90
91    - unit-at-a-time:  In this mode the analysis of all functions is deferred
92      to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97      reachable.  References to functions and variables are discovered too,
98      and variables found to be needed are output to the assembly file.  Via
99      the mark_referenced call in assemble_variable, functions referenced by
100      static variables are noticed too.
101
102      The intra-procedural information is produced and its existence is
103      indicated by global_info_ready.  Once this flag is set it is impossible
104      to change a function from !reachable to reachable, and thus
105      assemble_variable no longer calls mark_referenced.
106 
107      Finally the call-graph is topologically sorted and all reachable functions
108      that have not been completely inlined and are not external are output.
109
110      ??? It is possible that a reference to a function or variable is
111      optimized out.  We cannot deal with this nicely because the topological
112      order is not suitable for it.  For tree-ssa we may consider another pass
113      doing optimization and re-discovering reachable functions.
114 
115      ??? Reorganize the code so variables are output very last and only if
116      they really have been referenced by the produced code, so we catch more
117      cases where the reference has been optimized out.
118
119 - non-unit-at-a-time
120
121      All functions and variables are output as early as possible to conserve
122      memory consumption.  This may or may not result in less memory used, but
123      it is still needed for some legacy code that relies on a particular
124      ordering of things output from the compiler.
125 
126      Varpool data structures are not used and variables are output directly.
127 
128      Functions are output early using a call to
129      cgraph_assemble_pending_functions from cgraph_finalize_function.  The
130      decision on whether a function is needed is made more conservatively, so
131      uninlinable static functions are needed too.  During the call-graph
132      construction the edge destinations are not marked as reachable and
133      marking them is left entirely to assemble_variable.  */
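
/* A minimal sketch (not taken from any particular front end) of how a front
   end is expected to drive the interface described above, assuming FNDECL is
   a fully parsed FUNCTION_DECL and VARDECL a file-scope static VAR_DECL:

       cgraph_finalize_function (fndecl, false);
       varpool_finalize_decl (vardecl);
       ...
       cgraph_finalize_compilation_unit ();
       cgraph_optimize ();

   The exact placement of these calls differs between front ends; the snippet
   only illustrates the ordering, not any specific front end's code.  */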
134
135
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static tree record_reference (tree *, int *, void *);
168 static void cgraph_output_pending_asms (void);
169 static void cgraph_increase_alignment (void);
170 static void initialize_inline_failed (struct cgraph_node *);
171
172 /* Records tree nodes seen in record_reference. Simply using
173 walk_tree_without_duplicates doesn't guarantee each node is visited
174 once because it gets a new htab upon each recursive call from
175 record_reference itself. */
176 static struct pointer_set_t *visited_nodes;
177
178 static FILE *cgraph_dump_file;
179
180 /* Determine if function DECL is needed. That is, visible to something
181 either outside this translation unit, something magic in the system
182      configury, or (if not doing unit-at-a-time) to something we haven't
183 seen yet. */
184
185 static bool
186 decide_is_function_needed (struct cgraph_node *node, tree decl)
187 {
188 tree origin;
189 if (MAIN_NAME_P (DECL_NAME (decl))
190 && TREE_PUBLIC (decl))
191 {
192 node->local.externally_visible = true;
193 return true;
194 }
195
196 /* If the user told us it is used, then it must be so. */
197 if (node->local.externally_visible)
198 return true;
199
200 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
201 return true;
202
203 /* ??? If the assembler name is set by hand, it is possible to assemble
204 the name later after finalizing the function and the fact is noticed
205 in assemble_name then. This is arguably a bug. */
206 if (DECL_ASSEMBLER_NAME_SET_P (decl)
207 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
208 return true;
209
210 /* If we decided it was needed before, but at the time we didn't have
211 the body of the function available, then it's still needed. We have
212 to go back and re-check its dependencies now. */
213 if (node->needed)
214 return true;
215
216   /* Externally visible functions must be output.  The exception is
217      COMDAT functions, which must be output only when they are needed.
218 
219      When not optimizing, also output the static functions (see
220      PR24561), but don't do so for always_inline functions, functions
221      declared inline and nested functions.  These were optimized out
222      in the original implementation and it is unclear whether we want
223      to change the behavior here.  */
224 if (((TREE_PUBLIC (decl)
225 || (!optimize && !node->local.disregard_inline_limits
226 && !DECL_DECLARED_INLINE_P (decl)
227 && !node->origin))
228 && !flag_whole_program)
229 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
230 return true;
231
232 /* Constructors and destructors are reachable from the runtime by
233 some mechanism. */
234 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
235 return true;
236
237 if (flag_unit_at_a_time)
238 return false;
239
240   /* If not doing unit at a time, then we'll only defer this function
241      if it's marked for inlining.  Otherwise we want to emit it now.  */
242
243 /* "extern inline" functions are never output locally. */
244 if (DECL_EXTERNAL (decl))
245 return false;
246   /* Nested functions of an extern inline function shall not be emitted
247      unless we inlined the origin.  */
248 for (origin = decl_function_context (decl); origin;
249 origin = decl_function_context (origin))
250 if (DECL_EXTERNAL (origin))
251 return false;
252 /* We want to emit COMDAT functions only when absolutely necessary. */
253 if (DECL_COMDAT (decl))
254 return false;
255 if (!DECL_INLINE (decl)
256 || (!node->local.disregard_inline_limits
257 /* When declared inline, defer even the uninlinable functions.
258 This allows them to be eliminated when unused. */
259 && !DECL_DECLARED_INLINE_P (decl)
260 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
261 return true;
262
263 return false;
264 }
265
266 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
267    functions to the callgraph so that they look like ordinary reachable
268    functions inserted into the callgraph already at construction time.  */
269
270 bool
271 cgraph_process_new_functions (void)
272 {
273 bool output = false;
274 tree fndecl;
275 struct cgraph_node *node;
276
277   /* Note that this queue may grow as it's being processed, as the new
278      functions may generate new ones.  */
279 while (cgraph_new_nodes)
280 {
281 node = cgraph_new_nodes;
282 fndecl = node->decl;
283 cgraph_new_nodes = cgraph_new_nodes->next_needed;
284 switch (cgraph_state)
285 {
286 case CGRAPH_STATE_CONSTRUCTION:
287 	  /* At construction time we just need to finalize the function and move
288 	     it onto the reachable functions list.  */
289
290 node->next_needed = NULL;
291 cgraph_finalize_function (fndecl, false);
292 cgraph_mark_reachable_node (node);
293 output = true;
294 break;
295
296 case CGRAPH_STATE_IPA:
297 case CGRAPH_STATE_IPA_SSA:
298 	  /* When IPA optimization has already started, do all essential
299 	     transformations that have already been performed on the whole
300 	     cgraph but not on this function.  */
301
302 tree_register_cfg_hooks ();
303 if (!node->analyzed)
304 cgraph_analyze_function (node);
305 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
306 current_function_decl = fndecl;
307 node->local.inlinable = tree_inlinable_function_p (fndecl);
308 node->local.self_insns = estimate_num_insns (fndecl);
309 node->local.disregard_inline_limits
310 = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
311 /* Inlining characteristics are maintained by the
312 cgraph_mark_inline. */
313 node->global.insns = node->local.self_insns;
314 initialize_inline_failed (node);
315 if (flag_really_no_inline && !node->local.disregard_inline_limits)
316 node->local.inlinable = 0;
317 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
318 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
319 /* When not optimizing, be sure we run early local passes anyway
320 to expand OMP. */
321 || !optimize)
322 execute_pass_list (pass_early_local_passes.sub);
323 free_dominance_info (CDI_POST_DOMINATORS);
324 free_dominance_info (CDI_DOMINATORS);
325 pop_cfun ();
326 current_function_decl = NULL;
327 break;
328
329 case CGRAPH_STATE_EXPANSION:
330 /* Functions created during expansion shall be compiled
331 directly. */
332 node->output = 0;
333 cgraph_expand_function (node);
334 break;
335
336 default:
337 gcc_unreachable ();
338 break;
339 }
340 }
341 return output;
342 }
343
344 /* When not doing unit-at-a-time, output all functions enqueued.
345    Return true when such functions were found.  */
346
347 static bool
348 cgraph_assemble_pending_functions (void)
349 {
350 bool output = false;
351
352 if (flag_unit_at_a_time)
353 return false;
354
355 cgraph_output_pending_asms ();
356
357 while (cgraph_nodes_queue)
358 {
359 struct cgraph_node *n = cgraph_nodes_queue;
360
361 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
362 n->next_needed = NULL;
363 if (!n->global.inlined_to
364 && !n->alias
365 && !DECL_EXTERNAL (n->decl))
366 {
367 cgraph_expand_function (n);
368 output = true;
369 }
370 output |= cgraph_process_new_functions ();
371 }
372
373 return output;
374 }
375
376
377 /* As a GCC extension we allow redefinition of the function.  The
378    semantics when the two bodies differ are not well defined.
379    We replace the old body with the new body, so in unit-at-a-time mode
380    we always use the new body, while in normal mode we may end up with
381    the old body inlined into some functions and the new body expanded
382    and inlined in others.
383 
384    ??? It may make more sense to use one body for inlining and the other
385    body for expanding the function, but this is difficult to do.  */
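
/* For illustration only (a sketch of the extension described above, not a
   test case from the tree): with the GNU "extern inline" extension a single
   translation unit may provide two bodies for the same function,

       extern inline int sq (int x) { return x * x; }
       int sq (int x) { return x * x; }

   and it is the second, non-extern-inline body that replaces the first one
   here via cgraph_reset_node.  */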
386
387 static void
388 cgraph_reset_node (struct cgraph_node *node)
389 {
390 /* If node->output is set, then this is a unit-at-a-time compilation
391 and we have already begun whole-unit analysis. This is *not*
392 testing for whether we've already emitted the function. That
393 case can be sort-of legitimately seen with real function
394 redefinition errors. I would argue that the front end should
395 never present us with such a case, but don't enforce that for now. */
396 gcc_assert (!node->output);
397
398 /* Reset our data structures so we can analyze the function again. */
399 memset (&node->local, 0, sizeof (node->local));
400 memset (&node->global, 0, sizeof (node->global));
401 memset (&node->rtl, 0, sizeof (node->rtl));
402 node->analyzed = false;
403 node->local.redefined_extern_inline = true;
404 node->local.finalized = false;
405
406 if (!flag_unit_at_a_time)
407 {
408 struct cgraph_node *n, *next;
409
410 for (n = cgraph_nodes; n; n = next)
411 {
412 next = n->next;
413 if (n->global.inlined_to == node)
414 cgraph_remove_node (n);
415 }
416 }
417
418 cgraph_node_remove_callees (node);
419
420   /* We may need to re-queue the node for assembling in case
421      we already processed it and ignored it as not needed.  */
422 if (node->reachable && !flag_unit_at_a_time)
423 {
424 struct cgraph_node *n;
425
426 for (n = cgraph_nodes_queue; n; n = n->next_needed)
427 if (n == node)
428 break;
429 if (!n)
430 node->reachable = 0;
431 }
432 }
433
434 static void
435 cgraph_lower_function (struct cgraph_node *node)
436 {
437 if (node->lowered)
438 return;
439 tree_lowering_passes (node->decl);
440 node->lowered = true;
441 }
442
443 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
444 logic in effect. If NESTED is true, then our caller cannot stand to have
445 the garbage collector run at the moment. We would need to either create
446 a new GC context, or just not compile right now. */
447
448 void
449 cgraph_finalize_function (tree decl, bool nested)
450 {
451 struct cgraph_node *node = cgraph_node (decl);
452
453 if (node->local.finalized)
454 cgraph_reset_node (node);
455
456 notice_global_symbol (decl);
457 node->decl = decl;
458 node->local.finalized = true;
459 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
460 if (node->nested)
461 lower_nested_functions (decl);
462 gcc_assert (!node->nested);
463
464 /* If not unit at a time, then we need to create the call graph
465 now, so that called functions can be queued and emitted now. */
466 if (!flag_unit_at_a_time)
467 {
468 cgraph_analyze_function (node);
469 cgraph_decide_inlining_incrementally (node, false);
470 }
471
472 if (decide_is_function_needed (node, decl))
473 cgraph_mark_needed_node (node);
474
475 /* Since we reclaim unreachable nodes at the end of every language
476 level unit, we need to be conservative about possible entry points
477 there. */
478 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
479 cgraph_mark_reachable_node (node);
480
481 /* If not unit at a time, go ahead and emit everything we've found
482 to be reachable at this time. */
483 if (!nested)
484 {
485 if (!cgraph_assemble_pending_functions ())
486 ggc_collect ();
487 }
488
489 /* If we've not yet emitted decl, tell the debug info about it. */
490 if (!TREE_ASM_WRITTEN (decl))
491 (*debug_hooks->deferred_inline_function) (decl);
492
493 /* Possibly warn about unused parameters. */
494 if (warn_unused_parameter)
495 do_warn_unused_parameter (decl);
496 }
497
498 /* Walk tree and record all calls. Called via walk_tree. */
499 static tree
500 record_reference (tree *tp, int *walk_subtrees, void *data)
501 {
502 tree t = *tp;
503
504 switch (TREE_CODE (t))
505 {
506 case VAR_DECL:
507 /* ??? Really, we should mark this decl as *potentially* referenced
508 by this function and re-examine whether the decl is actually used
509 after rtl has been generated. */
510 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
511 {
512 varpool_mark_needed_node (varpool_node (t));
513 if (lang_hooks.callgraph.analyze_expr)
514 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
515 data);
516 }
517 break;
518
519 case FDESC_EXPR:
520 case ADDR_EXPR:
521 if (flag_unit_at_a_time)
522 {
523 /* Record dereferences to the functions. This makes the
524 functions reachable unconditionally. */
525 tree decl = TREE_OPERAND (*tp, 0);
526 if (TREE_CODE (decl) == FUNCTION_DECL)
527 cgraph_mark_needed_node (cgraph_node (decl));
528 }
529 break;
530
531 default:
532       /* Save some cycles by not walking types and declarations as we
533 	 won't find anything useful there anyway.  */
534 if (IS_TYPE_OR_DECL_P (*tp))
535 {
536 *walk_subtrees = 0;
537 break;
538 }
539
540 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
541 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
542 break;
543 }
544
545 return NULL;
546 }
547
548 /* Create cgraph edges for function calls inside BODY from NODE. */
549
550 static void
551 cgraph_create_edges (struct cgraph_node *node, tree body)
552 {
553 basic_block bb;
554
555 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
556 block_stmt_iterator bsi;
557 tree step;
558 visited_nodes = pointer_set_create ();
559
560 /* Reach the trees by walking over the CFG, and note the
561 enclosing basic-blocks in the call edges. */
562 FOR_EACH_BB_FN (bb, this_cfun)
563 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
564 {
565 tree stmt = bsi_stmt (bsi);
566 tree call = get_call_expr_in (stmt);
567 tree decl;
568
569 if (call && (decl = get_callee_fndecl (call)))
570 {
571 cgraph_create_edge (node, cgraph_node (decl), stmt,
572 bb->count,
573 bb->loop_depth);
574 walk_tree (&TREE_OPERAND (call, 1),
575 record_reference, node, visited_nodes);
576 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
577 walk_tree (&GIMPLE_STMT_OPERAND (stmt, 0),
578 record_reference, node, visited_nodes);
579 }
580 else
581 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
582 }
583
584 /* Look for initializers of constant variables and private statics. */
585 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
586 step;
587 step = TREE_CHAIN (step))
588 {
589 tree decl = TREE_VALUE (step);
590 if (TREE_CODE (decl) == VAR_DECL
591 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
592 && flag_unit_at_a_time)
593 varpool_finalize_decl (decl);
594 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
595 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
596 }
597
598 pointer_set_destroy (visited_nodes);
599 visited_nodes = NULL;
600 }
601
602 void
603 record_references_in_initializer (tree decl)
604 {
605 visited_nodes = pointer_set_create ();
606 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
607 pointer_set_destroy (visited_nodes);
608 visited_nodes = NULL;
609 }
610
611
612 /* Give initial reasons why inlining would fail.  These get
613    either NULLified or, usually, overwritten by a more precise reason
614    later.  */
615 static void
616 initialize_inline_failed (struct cgraph_node *node)
617 {
618 struct cgraph_edge *e;
619
620 for (e = node->callers; e; e = e->next_caller)
621 {
622 gcc_assert (!e->callee->global.inlined_to);
623 gcc_assert (e->inline_failed);
624 if (node->local.redefined_extern_inline)
625 e->inline_failed = N_("redefined extern inline functions are not "
626 "considered for inlining");
627 else if (!node->local.inlinable)
628 e->inline_failed = N_("function not inlinable");
629 else
630 e->inline_failed = N_("function not considered for inlining");
631 }
632 }
633
634 /* Rebuild call edges from the current function after passes that are not
635    aware of cgraph updating.  */
636 static unsigned int
637 rebuild_cgraph_edges (void)
638 {
639 basic_block bb;
640 struct cgraph_node *node = cgraph_node (current_function_decl);
641 block_stmt_iterator bsi;
642
643 cgraph_node_remove_callees (node);
644
645 node->count = ENTRY_BLOCK_PTR->count;
646
647 FOR_EACH_BB (bb)
648 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
649 {
650 tree stmt = bsi_stmt (bsi);
651 tree call = get_call_expr_in (stmt);
652 tree decl;
653
654 if (call && (decl = get_callee_fndecl (call)))
655 cgraph_create_edge (node, cgraph_node (decl), stmt,
656 bb->count,
657 bb->loop_depth);
658 }
659 initialize_inline_failed (node);
660 gcc_assert (!node->global.inlined_to);
661 return 0;
662 }
663
664 struct tree_opt_pass pass_rebuild_cgraph_edges =
665 {
666 NULL, /* name */
667 NULL, /* gate */
668 rebuild_cgraph_edges, /* execute */
669 NULL, /* sub */
670 NULL, /* next */
671 0, /* static_pass_number */
672 0, /* tv_id */
673 PROP_cfg, /* properties_required */
674 0, /* properties_provided */
675 0, /* properties_destroyed */
676 0, /* todo_flags_start */
677 0, /* todo_flags_finish */
678 0 /* letter */
679 };
680
681 /* Verify the consistency of the given cgraph node.  */
682 void
683 verify_cgraph_node (struct cgraph_node *node)
684 {
685 struct cgraph_edge *e;
686 struct cgraph_node *main_clone;
687 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
688 basic_block this_block;
689 block_stmt_iterator bsi;
690 bool error_found = false;
691
692 if (errorcount || sorrycount)
693 return;
694
695 timevar_push (TV_CGRAPH_VERIFY);
696 for (e = node->callees; e; e = e->next_callee)
697 if (e->aux)
698 {
699 error ("aux field set for edge %s->%s",
700 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
701 error_found = true;
702 }
703 if (node->count < 0)
704 {
705       error ("execution count is negative");
706 error_found = true;
707 }
708 for (e = node->callers; e; e = e->next_caller)
709 {
710 if (e->count < 0)
711 {
712 error ("caller edge count is negative");
713 error_found = true;
714 }
715 if (!e->inline_failed)
716 {
717 if (node->global.inlined_to
718 != (e->caller->global.inlined_to
719 ? e->caller->global.inlined_to : e->caller))
720 {
721 error ("inlined_to pointer is wrong");
722 error_found = true;
723 }
724 if (node->callers->next_caller)
725 {
726 error ("multiple inline callers");
727 error_found = true;
728 }
729 }
730 else
731 if (node->global.inlined_to)
732 {
733 error ("inlined_to pointer set for noninline callers");
734 error_found = true;
735 }
736 }
737 if (!node->callers && node->global.inlined_to)
738 {
739 error ("inlined_to pointer is set but no predecessors found");
740 error_found = true;
741 }
742 if (node->global.inlined_to == node)
743 {
744 error ("inlined_to pointer refers to itself");
745 error_found = true;
746 }
747
748 for (main_clone = cgraph_node (node->decl); main_clone;
749 main_clone = main_clone->next_clone)
750 if (main_clone == node)
751 break;
752 if (!cgraph_node (node->decl))
753 {
754 error ("node not found in cgraph_hash");
755 error_found = true;
756 }
757
758 if (node->analyzed
759 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
760 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
761 {
762 if (this_cfun->cfg)
763 {
764 /* The nodes we're interested in are never shared, so walk
765 the tree ignoring duplicates. */
766 visited_nodes = pointer_set_create ();
767 /* Reach the trees by walking over the CFG, and note the
768 enclosing basic-blocks in the call edges. */
769 FOR_EACH_BB_FN (this_block, this_cfun)
770 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
771 {
772 tree stmt = bsi_stmt (bsi);
773 tree call = get_call_expr_in (stmt);
774 tree decl;
775 if (call && (decl = get_callee_fndecl (call)))
776 {
777 struct cgraph_edge *e = cgraph_edge (node, stmt);
778 if (e)
779 {
780 if (e->aux)
781 {
782 error ("shared call_stmt:");
783 debug_generic_stmt (stmt);
784 error_found = true;
785 }
786 if (e->callee->decl != cgraph_node (decl)->decl
787 && e->inline_failed)
788 {
789 error ("edge points to wrong declaration:");
790 debug_tree (e->callee->decl);
791 fprintf (stderr," Instead of:");
792 debug_tree (decl);
793 }
794 e->aux = (void *)1;
795 }
796 else
797 {
798 error ("missing callgraph edge for call stmt:");
799 debug_generic_stmt (stmt);
800 error_found = true;
801 }
802 }
803 }
804 pointer_set_destroy (visited_nodes);
805 visited_nodes = NULL;
806 }
807 else
808 /* No CFG available?! */
809 gcc_unreachable ();
810
811 for (e = node->callees; e; e = e->next_callee)
812 {
813 if (!e->aux)
814 {
815 error ("edge %s->%s has no corresponding call_stmt",
816 cgraph_node_name (e->caller),
817 cgraph_node_name (e->callee));
818 debug_generic_stmt (e->call_stmt);
819 error_found = true;
820 }
821 e->aux = 0;
822 }
823 }
824 if (error_found)
825 {
826 dump_cgraph_node (stderr, node);
827 internal_error ("verify_cgraph_node failed");
828 }
829 timevar_pop (TV_CGRAPH_VERIFY);
830 }
831
832 /* Verify whole cgraph structure. */
833 void
834 verify_cgraph (void)
835 {
836 struct cgraph_node *node;
837
838 if (sorrycount || errorcount)
839 return;
840
841 for (node = cgraph_nodes; node; node = node->next)
842 verify_cgraph_node (node);
843 }
844
845 /* Output all asm statements we have stored up to be output. */
846
847 static void
848 cgraph_output_pending_asms (void)
849 {
850 struct cgraph_asm_node *can;
851
852 if (errorcount || sorrycount)
853 return;
854
855 for (can = cgraph_asm_nodes; can; can = can->next)
856 assemble_asm (can->asm_str);
857 cgraph_asm_nodes = NULL;
858 }
859
860 /* Analyze the function scheduled to be output. */
861 void
862 cgraph_analyze_function (struct cgraph_node *node)
863 {
864 tree decl = node->decl;
865
866 current_function_decl = decl;
867 push_cfun (DECL_STRUCT_FUNCTION (decl));
868 cgraph_lower_function (node);
869
870 /* First kill forward declaration so reverse inlining works properly. */
871 cgraph_create_edges (node, decl);
872
873 node->local.estimated_self_stack_size = estimated_stack_frame_size ();
874 node->global.estimated_stack_size = node->local.estimated_self_stack_size;
875 node->global.stack_frame_offset = 0;
876 node->local.inlinable = tree_inlinable_function_p (decl);
877 if (!flag_unit_at_a_time)
878 node->local.self_insns = estimate_num_insns (decl);
879 if (node->local.inlinable)
880 node->local.disregard_inline_limits
881 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
882 initialize_inline_failed (node);
883 if (flag_really_no_inline && !node->local.disregard_inline_limits)
884 node->local.inlinable = 0;
885 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
886 node->global.insns = node->local.self_insns;
887 if (!flag_unit_at_a_time)
888 {
889 bitmap_obstack_initialize (NULL);
890 tree_register_cfg_hooks ();
891 execute_pass_list (pass_early_local_passes.sub);
892 free_dominance_info (CDI_POST_DOMINATORS);
893 free_dominance_info (CDI_DOMINATORS);
894 bitmap_obstack_release (NULL);
895 }
896
897 node->analyzed = true;
898 pop_cfun ();
899 current_function_decl = NULL;
900 }
901
902 /* Look for externally_visible and used attributes and mark cgraph nodes
903 accordingly.
904
905 We cannot mark the nodes at the point the attributes are processed (in
906 handle_*_attribute) because the copy of the declarations available at that
907 point may not be canonical. For example, in:
908
909 void f();
910 void f() __attribute__((used));
911
912 the declaration we see in handle_used_attribute will be the second
913 declaration -- but the front end will subsequently merge that declaration
914 with the original declaration and discard the second declaration.
915
916 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
917
918 void f() {}
919 void f() __attribute__((externally_visible));
920
921 is valid.
922
923 So, we walk the nodes at the end of the translation unit, applying the
924 attributes at that point. */
925
926 static void
927 process_function_and_variable_attributes (struct cgraph_node *first,
928 struct varpool_node *first_var)
929 {
930 struct cgraph_node *node;
931 struct varpool_node *vnode;
932
933 for (node = cgraph_nodes; node != first; node = node->next)
934 {
935 tree decl = node->decl;
936 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
937 {
938 mark_decl_referenced (decl);
939 if (node->local.finalized)
940 cgraph_mark_needed_node (node);
941 }
942 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
943 {
944 if (! TREE_PUBLIC (node->decl))
945 warning (OPT_Wattributes,
946 		     "%J%<externally_visible%> attribute has effect only on public objects",
947 node->decl);
948 else
949 {
950 if (node->local.finalized)
951 cgraph_mark_needed_node (node);
952 node->local.externally_visible = true;
953 }
954 }
955 }
956 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
957 {
958 tree decl = vnode->decl;
959 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
960 {
961 mark_decl_referenced (decl);
962 if (vnode->finalized)
963 varpool_mark_needed_node (vnode);
964 }
965 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
966 {
967 if (! TREE_PUBLIC (vnode->decl))
968 warning (OPT_Wattributes,
969 		     "%J%<externally_visible%> attribute has effect only on public objects",
970 vnode->decl);
971 else
972 {
973 if (vnode->finalized)
974 varpool_mark_needed_node (vnode);
975 vnode->externally_visible = true;
976 }
977 }
978 }
979 }
980
981 /* Analyze the whole compilation unit once it is parsed completely. */
982
983 void
984 cgraph_finalize_compilation_unit (void)
985 {
986 struct cgraph_node *node, *next;
987 /* Keep track of already processed nodes when called multiple times for
988 intermodule optimization. */
989 static struct cgraph_node *first_analyzed;
990 struct cgraph_node *first_processed = first_analyzed;
991 static struct varpool_node *first_analyzed_var;
992
993 if (errorcount || sorrycount)
994 return;
995
996 finish_aliases_1 ();
997
998 if (!flag_unit_at_a_time)
999 {
1000 cgraph_output_pending_asms ();
1001 cgraph_assemble_pending_functions ();
1002 varpool_output_debug_info ();
1003 return;
1004 }
1005
1006 if (!quiet_flag)
1007 {
1008 fprintf (stderr, "\nAnalyzing compilation unit\n");
1009 fflush (stderr);
1010 }
1011
1012 timevar_push (TV_CGRAPH);
1013 process_function_and_variable_attributes (first_processed,
1014 first_analyzed_var);
1015 first_processed = cgraph_nodes;
1016 first_analyzed_var = varpool_nodes;
1017 varpool_analyze_pending_decls ();
1018 if (cgraph_dump_file)
1019 {
1020 fprintf (cgraph_dump_file, "Initial entry points:");
1021 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1022 if (node->needed && DECL_SAVED_TREE (node->decl))
1023 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1024 fprintf (cgraph_dump_file, "\n");
1025 }
1026
1027 /* Propagate reachability flag and lower representation of all reachable
1028 functions. In the future, lowering will introduce new functions and
1029 new entry points on the way (by template instantiation and virtual
1030 method table generation for instance). */
1031 while (cgraph_nodes_queue)
1032 {
1033 struct cgraph_edge *edge;
1034 tree decl = cgraph_nodes_queue->decl;
1035
1036 node = cgraph_nodes_queue;
1037 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1038 node->next_needed = NULL;
1039
1040       /* ??? It is possible to create an extern inline function and later use
1041 	 the weak alias attribute to kill its body.  See
1042 	 gcc.c-torture/compile/20011119-1.c  */
1043 if (!DECL_SAVED_TREE (decl))
1044 {
1045 cgraph_reset_node (node);
1046 continue;
1047 }
1048
1049 gcc_assert (!node->analyzed && node->reachable);
1050 gcc_assert (DECL_SAVED_TREE (decl));
1051
1052 cgraph_analyze_function (node);
1053
1054 for (edge = node->callees; edge; edge = edge->next_callee)
1055 if (!edge->callee->reachable)
1056 cgraph_mark_reachable_node (edge->callee);
1057
1058       /* We finalize local static variables while constructing the callgraph
1059 	 edges.  Process their attributes too.  */
1060 process_function_and_variable_attributes (first_processed,
1061 first_analyzed_var);
1062 first_processed = cgraph_nodes;
1063 first_analyzed_var = varpool_nodes;
1064 varpool_analyze_pending_decls ();
1065 }
1066
1067 /* Collect entry points to the unit. */
1068 if (cgraph_dump_file)
1069 {
1070 fprintf (cgraph_dump_file, "Unit entry points:");
1071 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1072 if (node->needed && DECL_SAVED_TREE (node->decl))
1073 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1074 fprintf (cgraph_dump_file, "\n\nInitial ");
1075 dump_cgraph (cgraph_dump_file);
1076 }
1077
1078 if (cgraph_dump_file)
1079 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1080
1081 for (node = cgraph_nodes; node != first_analyzed; node = next)
1082 {
1083 tree decl = node->decl;
1084 next = node->next;
1085
1086 if (node->local.finalized && !DECL_SAVED_TREE (decl))
1087 cgraph_reset_node (node);
1088
1089 if (!node->reachable && DECL_SAVED_TREE (decl))
1090 {
1091 if (cgraph_dump_file)
1092 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1093 cgraph_remove_node (node);
1094 continue;
1095 }
1096 else
1097 node->next_needed = NULL;
1098 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1099 gcc_assert (node->analyzed == node->local.finalized);
1100 }
1101 if (cgraph_dump_file)
1102 {
1103 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1104 dump_cgraph (cgraph_dump_file);
1105 }
1106 first_analyzed = cgraph_nodes;
1107 ggc_collect ();
1108 timevar_pop (TV_CGRAPH);
1109 }
1110 /* Figure out what functions we want to assemble. */
1111
1112 static void
1113 cgraph_mark_functions_to_output (void)
1114 {
1115 struct cgraph_node *node;
1116
1117 for (node = cgraph_nodes; node; node = node->next)
1118 {
1119 tree decl = node->decl;
1120 struct cgraph_edge *e;
1121
1122 gcc_assert (!node->output);
1123
1124 for (e = node->callers; e; e = e->next_caller)
1125 if (e->inline_failed)
1126 break;
1127
1128 /* We need to output all local functions that are used and not
1129 always inlined, as well as those that are reachable from
1130 outside the current compilation unit. */
1131 if (DECL_SAVED_TREE (decl)
1132 && !node->global.inlined_to
1133 && (node->needed
1134 || (e && node->reachable))
1135 && !TREE_ASM_WRITTEN (decl)
1136 && !DECL_EXTERNAL (decl))
1137 node->output = 1;
1138 else
1139 {
1140 /* We should've reclaimed all functions that are not needed. */
1141 #ifdef ENABLE_CHECKING
1142 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1143 && !DECL_EXTERNAL (decl))
1144 {
1145 dump_cgraph_node (stderr, node);
1146 internal_error ("failed to reclaim unneeded function");
1147 }
1148 #endif
1149 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1150 || DECL_EXTERNAL (decl));
1151
1152 }
1153
1154 }
1155 }
1156
1157 /* Expand function specified by NODE. */
1158
1159 static void
1160 cgraph_expand_function (struct cgraph_node *node)
1161 {
1162 tree decl = node->decl;
1163
1164 /* We ought to not compile any inline clones. */
1165 gcc_assert (!node->global.inlined_to);
1166
1167 if (flag_unit_at_a_time)
1168 announce_function (decl);
1169
1170 cgraph_lower_function (node);
1171
1172 /* Generate RTL for the body of DECL. */
1173 lang_hooks.callgraph.expand_function (decl);
1174
1175 /* Make sure that BE didn't give up on compiling. */
1176 /* ??? Can happen with nested function of extern inline. */
1177 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1178
1179 current_function_decl = NULL;
1180 if (!cgraph_preserve_function_body_p (node->decl))
1181 {
1182 cgraph_release_function_body (node);
1183 /* Eliminate all call edges. This is important so the call_expr no longer
1184 points to the dead function body. */
1185 cgraph_node_remove_callees (node);
1186 }
1187
1188 cgraph_function_flags_ready = true;
1189 }
1190
1191 /* Return true when edge E is inlined; otherwise set *REASON to why not.  */
1192
1193 bool
1194 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1195 {
1196 *reason = e->inline_failed;
1197 return !e->inline_failed;
1198 }
1199
1200
1201
1202 /* Expand all functions that must be output.
1203
1204    Attempt to topologically sort the nodes so a function is output when
1205    all the functions it calls are already assembled, allowing data to be
1206    propagated across the callgraph.  Use a stack to get smaller distance
1207 between a function and its callees (later we may choose to use a more
1208 sophisticated algorithm for function reordering; we will likely want
1209 to use subsections to make the output functions appear in top-down
1210 order). */
1211
1212 static void
1213 cgraph_expand_all_functions (void)
1214 {
1215 struct cgraph_node *node;
1216 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1217 int order_pos = 0, new_order_pos = 0;
1218 int i;
1219
1220 order_pos = cgraph_postorder (order);
1221 gcc_assert (order_pos == cgraph_n_nodes);
1222
1223 /* Garbage collector may remove inline clones we eliminate during
1224 optimization. So we must be sure to not reference them. */
1225 for (i = 0; i < order_pos; i++)
1226 if (order[i]->output)
1227 order[new_order_pos++] = order[i];
1228
1229 for (i = new_order_pos - 1; i >= 0; i--)
1230 {
1231 node = order[i];
1232 if (node->output)
1233 {
1234 gcc_assert (node->reachable);
1235 node->output = 0;
1236 cgraph_expand_function (node);
1237 }
1238 }
1239 cgraph_process_new_functions ();
1240
1241 free (order);
1242
1243 }
1244
1245 /* This is used to sort the node types by the cgraph order number. */
1246
1247 struct cgraph_order_sort
1248 {
1249 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1250 union
1251 {
1252 struct cgraph_node *f;
1253 struct varpool_node *v;
1254 struct cgraph_asm_node *a;
1255 } u;
1256 };
1257
1258 /* Output all functions, variables, and asm statements in the order
1259 according to their order fields, which is the order in which they
1260 appeared in the file. This implements -fno-toplevel-reorder. In
1261 this mode we may output functions and variables which don't really
1262 need to be output. */
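
/* As an illustration (a hypothetical unit, not taken from a test case), with
   -fno-toplevel-reorder a translation unit such as

       int x = 1;
       asm (".pushsection .note.example\n\t.popsection");
       int y = 2;

   must emit x, the toplevel asm and y in exactly this source order, even if
   y turns out to be unreferenced, which is why this mode may output entities
   that are not strictly needed.  */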
1263
1264 static void
1265 cgraph_output_in_order (void)
1266 {
1267 int max;
1268 size_t size;
1269 struct cgraph_order_sort *nodes;
1270 int i;
1271 struct cgraph_node *pf;
1272 struct varpool_node *pv;
1273 struct cgraph_asm_node *pa;
1274
1275 max = cgraph_order;
1276 size = max * sizeof (struct cgraph_order_sort);
1277 nodes = (struct cgraph_order_sort *) alloca (size);
1278 memset (nodes, 0, size);
1279
1280 varpool_analyze_pending_decls ();
1281
1282 for (pf = cgraph_nodes; pf; pf = pf->next)
1283 {
1284 if (pf->output)
1285 {
1286 i = pf->order;
1287 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1288 nodes[i].kind = ORDER_FUNCTION;
1289 nodes[i].u.f = pf;
1290 }
1291 }
1292
1293 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1294 {
1295 i = pv->order;
1296 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1297 nodes[i].kind = ORDER_VAR;
1298 nodes[i].u.v = pv;
1299 }
1300
1301 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1302 {
1303 i = pa->order;
1304 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1305 nodes[i].kind = ORDER_ASM;
1306 nodes[i].u.a = pa;
1307 }
1308
1309 for (i = 0; i < max; ++i)
1310 {
1311 switch (nodes[i].kind)
1312 {
1313 case ORDER_FUNCTION:
1314 nodes[i].u.f->output = 0;
1315 cgraph_expand_function (nodes[i].u.f);
1316 break;
1317
1318 case ORDER_VAR:
1319 varpool_assemble_decl (nodes[i].u.v);
1320 break;
1321
1322 case ORDER_ASM:
1323 assemble_asm (nodes[i].u.a->asm_str);
1324 break;
1325
1326 case ORDER_UNDEFINED:
1327 break;
1328
1329 default:
1330 gcc_unreachable ();
1331 }
1332 }
1333
1334 cgraph_asm_nodes = NULL;
1335 }
1336
1337 /* Mark visibility of all functions.
1338
1339 A local function is one whose calls can occur only in the current
1340 compilation unit and all its calls are explicit, so we can change
1341 its calling convention. We simply mark all static functions whose
1342 address is not taken as local.
1343
1344    We also change the TREE_PUBLIC flag of all declarations that are public
1345    from the language point of view but for which we want to override this
1346    default via visibilities for the back end's point of view.  */
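
/* For example (a hypothetical unit, assuming -fwhole-program is not used),
   given

       static int helper (int x) { return x + 1; }
       int api (int x) { return helper (x); }

   helper is static, its address is never taken and every call to it is
   visible here, so it is marked local and the back end may change its
   calling convention, while api stays externally visible.  */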
1347
1348 static void
1349 cgraph_function_and_variable_visibility (void)
1350 {
1351 struct cgraph_node *node;
1352 struct varpool_node *vnode;
1353
1354 for (node = cgraph_nodes; node; node = node->next)
1355 {
1356 if (node->reachable
1357 && (DECL_COMDAT (node->decl)
1358 || (!flag_whole_program
1359 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1360 node->local.externally_visible = true;
1361 if (!node->local.externally_visible && node->analyzed
1362 && !DECL_EXTERNAL (node->decl))
1363 {
1364 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1365 TREE_PUBLIC (node->decl) = 0;
1366 }
1367 node->local.local = (!node->needed
1368 && node->analyzed
1369 && !DECL_EXTERNAL (node->decl)
1370 && !node->local.externally_visible);
1371 }
1372 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1373 {
1374 if (vnode->needed
1375 && !flag_whole_program
1376 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1377 vnode->externally_visible = 1;
1378 if (!vnode->externally_visible)
1379 {
1380 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1381 TREE_PUBLIC (vnode->decl) = 0;
1382 }
1383 gcc_assert (TREE_STATIC (vnode->decl));
1384 }
1385
1386 /* Because we have to be conservative on the boundaries of source
1387    level units, it is possible that we marked some functions as
1388    reachable just because they might be used later via external
1389    linkage, but after making them local they are now really
1390    unreachable.  */
1391 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1392
1393 if (cgraph_dump_file)
1394 {
1395 fprintf (cgraph_dump_file, "\nMarking local functions:");
1396 for (node = cgraph_nodes; node; node = node->next)
1397 if (node->local.local)
1398 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1399 fprintf (cgraph_dump_file, "\n\n");
1400 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1401 for (node = cgraph_nodes; node; node = node->next)
1402 if (node->local.externally_visible)
1403 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1404 fprintf (cgraph_dump_file, "\n\n");
1405 }
1406 cgraph_function_flags_ready = true;
1407 }
1408
1409 /* Return true when function body of DECL still needs to be kept around
1410 for later re-use. */
1411 bool
1412 cgraph_preserve_function_body_p (tree decl)
1413 {
1414 struct cgraph_node *node;
1415 if (!cgraph_global_info_ready)
1416 return (flag_really_no_inline
1417 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1418 : DECL_INLINE (decl));
1419 /* Look if there is any clone around. */
1420 for (node = cgraph_node (decl); node; node = node->next_clone)
1421 if (node->global.inlined_to)
1422 return true;
1423 return false;
1424 }
1425
1426 static void
1427 ipa_passes (void)
1428 {
1429 cfun = NULL;
1430 current_function_decl = NULL;
1431 tree_register_cfg_hooks ();
1432 bitmap_obstack_initialize (NULL);
1433 execute_ipa_pass_list (all_ipa_passes);
1434 bitmap_obstack_release (NULL);
1435 }
1436
1437 /* Perform simple optimizations based on callgraph. */
1438
1439 void
1440 cgraph_optimize (void)
1441 {
1442 if (errorcount || sorrycount)
1443 return;
1444
1445 #ifdef ENABLE_CHECKING
1446 verify_cgraph ();
1447 #endif
1448 if (!flag_unit_at_a_time)
1449 {
1450 cgraph_assemble_pending_functions ();
1451 cgraph_process_new_functions ();
1452 cgraph_state = CGRAPH_STATE_FINISHED;
1453 cgraph_output_pending_asms ();
1454 varpool_assemble_pending_decls ();
1455 varpool_output_debug_info ();
1456 return;
1457 }
1458
1459 /* Frontend may output common variables after the unit has been finalized.
1460 It is safe to deal with them here as they are always zero initialized. */
1461 varpool_analyze_pending_decls ();
1462 cgraph_process_new_functions ();
1463
1464 timevar_push (TV_CGRAPHOPT);
1465 if (pre_ipa_mem_report)
1466 {
1467 fprintf (stderr, "Memory consumption before IPA\n");
1468 dump_memory_report (false);
1469 }
1470 if (!quiet_flag)
1471 fprintf (stderr, "Performing interprocedural optimizations\n");
1472
1473 cgraph_function_and_variable_visibility ();
1474 if (cgraph_dump_file)
1475 {
1476 fprintf (cgraph_dump_file, "Marked ");
1477 dump_cgraph (cgraph_dump_file);
1478 }
1479 cgraph_state = CGRAPH_STATE_IPA;
1480
1481   /* Don't run the IPA passes if there were any error or sorry messages.  */
1482 if (errorcount == 0 && sorrycount == 0)
1483 ipa_passes ();
1484
1485   /* This pass removes the bodies of extern inline functions we never inlined.
1486      Do this later so other IPA passes see what is really going on.  */
1487 cgraph_remove_unreachable_nodes (false, dump_file);
1488 cgraph_increase_alignment ();
1489 cgraph_global_info_ready = true;
1490 if (cgraph_dump_file)
1491 {
1492 fprintf (cgraph_dump_file, "Optimized ");
1493 dump_cgraph (cgraph_dump_file);
1494 dump_varpool (cgraph_dump_file);
1495 }
1496 if (post_ipa_mem_report)
1497 {
1498 fprintf (stderr, "Memory consumption after IPA\n");
1499 dump_memory_report (false);
1500 }
1501 timevar_pop (TV_CGRAPHOPT);
1502
1503 /* Output everything. */
1504 if (!quiet_flag)
1505 fprintf (stderr, "Assembling functions:\n");
1506 #ifdef ENABLE_CHECKING
1507 verify_cgraph ();
1508 #endif
1509
1510 cgraph_mark_functions_to_output ();
1511
1512 cgraph_state = CGRAPH_STATE_EXPANSION;
1513 if (!flag_toplevel_reorder)
1514 cgraph_output_in_order ();
1515 else
1516 {
1517 cgraph_output_pending_asms ();
1518
1519 cgraph_expand_all_functions ();
1520 varpool_remove_unreferenced_decls ();
1521
1522 varpool_assemble_pending_decls ();
1523 varpool_output_debug_info ();
1524 }
1525 cgraph_process_new_functions ();
1526 cgraph_state = CGRAPH_STATE_FINISHED;
1527
1528 if (cgraph_dump_file)
1529 {
1530 fprintf (cgraph_dump_file, "\nFinal ");
1531 dump_cgraph (cgraph_dump_file);
1532 }
1533 #ifdef ENABLE_CHECKING
1534 verify_cgraph ();
1535 /* Double check that all inline clones are gone and that all
1536 function bodies have been released from memory. */
1537 if (flag_unit_at_a_time
1538 && !(sorrycount || errorcount))
1539 {
1540 struct cgraph_node *node;
1541 bool error_found = false;
1542
1543 for (node = cgraph_nodes; node; node = node->next)
1544 if (node->analyzed
1545 && (node->global.inlined_to
1546 || DECL_SAVED_TREE (node->decl)))
1547 {
1548 error_found = true;
1549 dump_cgraph_node (stderr, node);
1550 }
1551 if (error_found)
1552 internal_error ("nodes with no released memory found");
1553 }
1554 #endif
1555 }
1556
1557 /* Increase alignment of global arrays to improve vectorization potential.
1558 TODO:
1559 - Consider also structs that have an array field.
1560 - Use ipa analysis to prune arrays that can't be vectorized?
1561 This should involve global alignment analysis and in the future also
1562 array padding. */
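
/* A sketch of the effect (hypothetical numbers for a target with 16-byte
   vectors): for a global array such as

       static float data[1024];

   whose natural alignment is 4 bytes, DECL_ALIGN is raised to the 16-byte
   alignment of the corresponding vector type when -ftree-vectorize and
   -fsection-anchors are both enabled, so vectorized accesses to data can use
   aligned loads and stores.  */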
1563
1564 static void
1565 cgraph_increase_alignment (void)
1566 {
1567 if (flag_section_anchors && flag_tree_vectorize)
1568 {
1569 struct varpool_node *vnode;
1570
1571 /* Increase the alignment of all global arrays for vectorization. */
1572 for (vnode = varpool_nodes_queue;
1573 vnode;
1574 vnode = vnode->next_needed)
1575 {
1576 tree vectype, decl = vnode->decl;
1577 unsigned int alignment;
1578
1579 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
1580 continue;
1581 vectype = get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl)));
1582 if (!vectype)
1583 continue;
1584 alignment = TYPE_ALIGN (vectype);
1585 if (DECL_ALIGN (decl) >= alignment)
1586 continue;
1587
1588 if (vect_can_force_dr_alignment_p (decl, alignment))
1589 {
1590 DECL_ALIGN (decl) = TYPE_ALIGN (vectype);
1591 DECL_USER_ALIGN (decl) = 1;
1592 if (cgraph_dump_file)
1593 {
1594 fprintf (cgraph_dump_file, "Increasing alignment of decl: ");
1595 print_generic_expr (cgraph_dump_file, decl, TDF_SLIM);
1596 }
1597 }
1598 }
1599 }
1600 }
1601
1602 /* Generate and emit a static constructor or destructor. WHICH must be
1603 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1604 GENERIC statements. */
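
/* A minimal usage sketch (assuming FNDECL is the FUNCTION_DECL of a
   parameterless runtime hook the front end wants run before main):

       tree body = alloc_stmt_list ();
       append_to_statement_list (build_function_call_expr (fndecl, NULL_TREE),
				 &body);
       cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   The resulting constructor is added to the callgraph via
   cgraph_add_new_function and marked needed, as done below.  */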
1605
1606 void
1607 cgraph_build_static_cdtor (char which, tree body, int priority)
1608 {
1609 static int counter = 0;
1610 char which_buf[16];
1611 tree decl, name, resdecl;
1612
1613 sprintf (which_buf, "%c_%d", which, counter++);
1614 name = get_file_function_name (which_buf);
1615
1616 decl = build_decl (FUNCTION_DECL, name,
1617 build_function_type (void_type_node, void_list_node));
1618 current_function_decl = decl;
1619
1620 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1621 DECL_ARTIFICIAL (resdecl) = 1;
1622 DECL_IGNORED_P (resdecl) = 1;
1623 DECL_RESULT (decl) = resdecl;
1624
1625 allocate_struct_function (decl);
1626
1627 TREE_STATIC (decl) = 1;
1628 TREE_USED (decl) = 1;
1629 DECL_ARTIFICIAL (decl) = 1;
1630 DECL_IGNORED_P (decl) = 1;
1631 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1632 DECL_SAVED_TREE (decl) = body;
1633 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1634 DECL_UNINLINABLE (decl) = 1;
1635
1636 DECL_INITIAL (decl) = make_node (BLOCK);
1637 TREE_USED (DECL_INITIAL (decl)) = 1;
1638
1639 DECL_SOURCE_LOCATION (decl) = input_location;
1640 cfun->function_end_locus = input_location;
1641
1642 switch (which)
1643 {
1644 case 'I':
1645 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1646 break;
1647 case 'D':
1648 DECL_STATIC_DESTRUCTOR (decl) = 1;
1649 break;
1650 default:
1651 gcc_unreachable ();
1652 }
1653
1654 gimplify_function_tree (decl);
1655
1656 cgraph_add_new_function (decl, false);
1657 cgraph_mark_needed_node (cgraph_node (decl));
1658
1659 if (targetm.have_ctors_dtors)
1660 {
1661 void (*fn) (rtx, int);
1662
1663 if (which == 'I')
1664 fn = targetm.asm_out.constructor;
1665 else
1666 fn = targetm.asm_out.destructor;
1667 fn (XEXP (DECL_RTL (decl), 0), priority);
1668 }
1669 }
1670
1671 void
1672 init_cgraph (void)
1673 {
1674 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1675 }
1676
1677 /* The edges representing the callers of the NEW_VERSION node were
1678 fixed by cgraph_function_versioning (), now the call_expr in their
1679 respective tree code should be updated to call the NEW_VERSION. */
1680
1681 static void
1682 update_call_expr (struct cgraph_node *new_version)
1683 {
1684 struct cgraph_edge *e;
1685
1686 gcc_assert (new_version);
1687 for (e = new_version->callers; e; e = e->next_caller)
1688 /* Update the call expr on the edges
1689 to call the new version. */
1690 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1691 }
1692
1693
1694 /* Create a new cgraph node which is the new version of
1695    the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
1696    edges which should be redirected to point to
1697    NEW_VERSION.  All the callee edges of OLD_VERSION
1698    are cloned to the new version node.  Return the new
1699    version node.  */
1700
1701 static struct cgraph_node *
1702 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1703 tree new_decl,
1704 VEC(cgraph_edge_p,heap) *redirect_callers)
1705 {
1706 struct cgraph_node *new_version;
1707 struct cgraph_edge *e, *new_e;
1708 struct cgraph_edge *next_callee;
1709 unsigned i;
1710
1711 gcc_assert (old_version);
1712
1713 new_version = cgraph_node (new_decl);
1714
1715 new_version->analyzed = true;
1716 new_version->local = old_version->local;
1717 new_version->global = old_version->global;
1718    new_version->rtl = old_version->rtl;
1719 new_version->reachable = true;
1720 new_version->count = old_version->count;
1721
1722 /* Clone the old node callees. Recursive calls are
1723 also cloned. */
1724 for (e = old_version->callees;e; e=e->next_callee)
1725 {
1726 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1727 new_e->count = e->count;
1728 }
1729 /* Fix recursive calls.
1730 If OLD_VERSION has a recursive call after the
1731 previous edge cloning, the new version will have an edge
1732 pointing to the old version, which is wrong;
1733 Redirect it to point to the new version. */
1734 for (e = new_version->callees ; e; e = next_callee)
1735 {
1736 next_callee = e->next_callee;
1737 if (e->callee == old_version)
1738 cgraph_redirect_edge_callee (e, new_version);
1739
1740 if (!next_callee)
1741 break;
1742 }
1743 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1744 {
1745 /* Redirect calls to the old version node to point to its new
1746 version. */
1747 cgraph_redirect_edge_callee (e, new_version);
1748 }
1749
1750 return new_version;
1751 }
1752
1753 /* Perform function versioning.
1754 Function versioning includes copying of the tree and
1755 a callgraph update (creating a new cgraph node and updating
1756 its callees and callers).
1757
1758 REDIRECT_CALLERS varray includes the edges to be redirected
1759 to the new version.
1760
1761 TREE_MAP is a mapping of tree nodes we want to replace with
1762 new ones (according to results of prior analysis).
1763 OLD_VERSION_NODE is the node that is versioned.
1764 It returns the new version's cgraph node. */
1765
1766 struct cgraph_node *
1767 cgraph_function_versioning (struct cgraph_node *old_version_node,
1768 VEC(cgraph_edge_p,heap) *redirect_callers,
1769 varray_type tree_map)
1770 {
1771 tree old_decl = old_version_node->decl;
1772 struct cgraph_node *new_version_node = NULL;
1773 tree new_decl;
1774
1775 if (!tree_versionable_function_p (old_decl))
1776 return NULL;
1777
1778 /* Make a new FUNCTION_DECL tree node for the
1779 new version. */
1780 new_decl = copy_node (old_decl);
1781
1782 /* Create the new version's call-graph node.
1783 and update the edges of the new node. */
1784 new_version_node =
1785 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1786 redirect_callers);
1787
1788 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1789 tree_function_versioning (old_decl, new_decl, tree_map, false);
1790 /* Update the call_expr on the edges to call the new version node. */
1791 update_call_expr (new_version_node);
1792
1793 /* Update the new version's properties.
1794      Make the new version visible only within this translation unit.
1795 ??? We cannot use COMDAT linkage because there is no
1796 ABI support for this. */
1797 DECL_EXTERNAL (new_version_node->decl) = 0;
1798 DECL_ONE_ONLY (new_version_node->decl) = 0;
1799 TREE_PUBLIC (new_version_node->decl) = 0;
1800 DECL_COMDAT (new_version_node->decl) = 0;
1801 new_version_node->local.externally_visible = 0;
1802 new_version_node->local.local = 1;
1803 new_version_node->lowered = true;
1804 return new_version_node;
1805 }
1806
1807 /* Produce separate function body for inline clones so the offline copy can be
1808 modified without affecting them. */
1809 struct cgraph_node *
1810 save_inline_function_body (struct cgraph_node *node)
1811 {
1812 struct cgraph_node *first_clone;
1813
1814 gcc_assert (node == cgraph_node (node->decl));
1815
1816 cgraph_lower_function (node);
1817
1818   /* In non-unit-at-a-time mode we construct a full-fledged clone we never
1819      output to assembly.  This clone is pointed to by the inline_decl of the
1820      original function; the inlining infrastructure knows how to handle it.  */
1821 if (!flag_unit_at_a_time)
1822 {
1823 struct cgraph_edge *e;
1824
1825 first_clone = cgraph_clone_node (node, node->count, 0, false);
1826 first_clone->needed = 0;
1827 first_clone->reachable = 1;
1828 /* Recursively clone all bodies. */
1829 for (e = first_clone->callees; e; e = e->next_callee)
1830 if (!e->inline_failed)
1831 cgraph_clone_inlined_nodes (e, true, false);
1832 }
1833 else
1834 first_clone = node->next_clone;
1835
1836 first_clone->decl = copy_node (node->decl);
1837 node->next_clone = NULL;
1838 if (!flag_unit_at_a_time)
1839 node->inline_decl = first_clone->decl;
1840 first_clone->prev_clone = NULL;
1841 cgraph_insert_node_to_hashtable (first_clone);
1842 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1843
1844 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1845 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1846
1847 DECL_EXTERNAL (first_clone->decl) = 0;
1848 DECL_ONE_ONLY (first_clone->decl) = 0;
1849 TREE_PUBLIC (first_clone->decl) = 0;
1850 DECL_COMDAT (first_clone->decl) = 0;
1851
1852 for (node = first_clone->next_clone; node; node = node->next_clone)
1853 node->decl = first_clone->decl;
1854 #ifdef ENABLE_CHECKING
1855 verify_cgraph_node (first_clone);
1856 #endif
1857 return first_clone;
1858 }