cgraphunit.c (cgraph_optimize): Call cgraph_add_new_functions before starting IPA...
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48 In the unit-at-a-time the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 - expand_function callback
81
82 This function is used to expand function and pass it into RTL back-end.
83 Front-end should not make any assumptions about when this function can be
84 called. In particular cgraph_assemble_pending_functions,
85 varpool_assemble_pending_variables, cgraph_finalize_function,
86 varpool_finalize_function, cgraph_optimize can cause arbitrarily
87 previously finalized functions to be expanded.
88
89 We implement two compilation modes.
90
91 - unit-at-a-time: In this mode analyzing of all functions is deferred
92 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too
98 and variables found to be needed output to the assembly file. Via
99 mark_referenced call in assemble_variable functions referenced by
100 static variables are noticed too.
101
102 The intra-procedural information is produced and its existence
103 indicated by global_info_ready. Once this flag is set it is impossible
104 to change function from !reachable to reachable and thus
105 assemble_variable no longer call mark_referenced.
106
107 Finally the call-graph is topologically sorted and all reachable functions
108 that has not been completely inlined or are not external are output.
109
110 ??? It is possible that reference to function or variable is optimized
111 out. We can not deal with this nicely because topological order is not
112 suitable for it. For tree-ssa we may consider another pass doing
113 optimization and re-discovering reachable functions.
114
115 ??? Reorganize code so variables are output very last and only if they
116 really has been referenced by produced code, so we catch more cases
117 where reference has been optimized out.
118
119 - non-unit-at-a-time
120
121 All functions are variables are output as early as possible to conserve
122 memory consumption. This may or may not result in less memory used but
123 it is still needed for some legacy code that rely on particular ordering
124 of things output from the compiler.
125
126 Varpool data structures are not used and variables are output directly.
127
128 Functions are output early using call of
129 cgraph_assemble_pending_function from cgraph_finalize_function. The
130 decision on whether function is needed is made more conservative so
131 uninlininable static functions are needed too. During the call-graph
132 construction the edge destinations are not marked as reachable and it
133 is completely relied upn assemble_variable to mark them. */
134
135
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static tree record_reference (tree *, int *, void *);
168 static void cgraph_output_pending_asms (void);
169 static void cgraph_increase_alignment (void);
170 static void initialize_inline_failed (struct cgraph_node *);
171
172 /* Records tree nodes seen in record_reference. Simply using
173 walk_tree_without_duplicates doesn't guarantee each node is visited
174 once because it gets a new htab upon each recursive call from
175 record_reference itself. */
176 static struct pointer_set_t *visited_nodes;
177
178 static FILE *cgraph_dump_file;
179
/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit, something magic in the system
   configury, or (if not doing unit-at-a-time) to something we haven't
   seen yet.  NODE is the callgraph node corresponding to DECL.  */

static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  tree origin;

  /* A public main() is always an entry point of the program; mark it
     externally visible so local-function analysis leaves it alone.  */
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* In non-unit-at-a-time mode an attribute((used)) function must be
     emitted eagerly; there is no later pass that would pick it up.  */
  if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later after finalizing the function and the fact is noticed
     in assemble_name then.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
	|| (!optimize && !node->local.disregard_inline_limits
	    && !DECL_DECLARED_INLINE_P (decl)
	    && !node->origin))
       && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  /* In unit-at-a-time mode everything below is decided later by the
     callgraph-driven analysis, so nothing else makes DECL needed now.  */
  if (flag_unit_at_a_time)
    return false;

  /* If not doing unit at a time, then we'll only defer this function
     if it's marked for inlining.  Otherwise we want to emit it now.  */

  /* "extern inline" functions are never output locally.  */
  if (DECL_EXTERNAL (decl))
    return false;
  /* Nested functions of extern inline function shall not be emitted unless
     we inlined the origin.  */
  for (origin = decl_function_context (decl); origin;
       origin = decl_function_context (origin))
    if (DECL_EXTERNAL (origin))
      return false;
  /* We want to emit COMDAT functions only when absolutely necessary.  */
  if (DECL_COMDAT (decl))
    return false;
  if (!DECL_INLINE (decl)
      || (!node->local.disregard_inline_limits
	  /* When declared inline, defer even the uninlinable functions.
	     This allows them to be eliminated when unused.  */
	  && !DECL_DECLARED_INLINE_P (decl)
	  && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
    return true;

  return false;
}
265
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Returns true if any function was expanded/output in the process.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as it's being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      /* Pop NODE off the queue before processing; the handlers below may
	 push additional nodes.  */
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	  /* When IPA optimization already started, do all essential
	     transformations that have been already performed on the whole
	     cgraph but not on this function.  */

	  tree_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  /* Recompute the local inline summaries that the whole-unit
	     analysis would normally have produced for this node.  */
	  node->local.inlinable = tree_inlinable_function_p (fndecl);
	  node->local.self_insns = estimate_num_insns (fndecl);
	  node->local.disregard_inline_limits
	    = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
	  /* Inlining characteristics are maintained by the
	     cgraph_mark_inline.  */
	  node->global.insns = node->local.self_insns;
	  initialize_inline_failed (node);
	  if (flag_really_no_inline && !node->local.disregard_inline_limits)
	    node->local.inlinable = 0;
	  /* Dominance info computed during analysis is stale for new code.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->output = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  return output;
}
336
337 /* When not doing unit-at-a-time, output all functions enqueued.
338 Return true when such a functions were found. */
339
340 static bool
341 cgraph_assemble_pending_functions (void)
342 {
343 bool output = false;
344
345 if (flag_unit_at_a_time)
346 return false;
347
348 cgraph_output_pending_asms ();
349
350 while (cgraph_nodes_queue)
351 {
352 struct cgraph_node *n = cgraph_nodes_queue;
353
354 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
355 n->next_needed = NULL;
356 if (!n->global.inlined_to
357 && !n->alias
358 && !DECL_EXTERNAL (n->decl))
359 {
360 cgraph_expand_function (n);
361 output = true;
362 }
363 output |= cgraph_process_new_functions ();
364 }
365
366 return output;
367 }
368
369
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->output is set, then this is a unit-at-a-time compilation
     and we have already begun whole-unit analysis.  This is *not*
     testing for whether we've already emitted the function.  That
     case can be sort-of legitimately seen with real function
     redefinition errors.  I would argue that the front end should
     never present us with such a case, but don't enforce that for now.  */
  gcc_assert (!node->output);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  if (!flag_unit_at_a_time)
    {
      struct cgraph_node *n, *next;

      /* Clones produced by inlining the old body are stale; remove
	 them.  Save NEXT first since removal unlinks N.  */
      for (n = cgraph_nodes; n; n = next)
	{
	  next = n->next;
	  if (n->global.inlined_to == node)
	    cgraph_remove_node (n);
	}
    }

  /* Call edges built from the old body no longer apply.  */
  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already processed it and ignored it as not needed.  */
  if (node->reachable && !flag_unit_at_a_time)
    {
      struct cgraph_node *n;

      /* If NODE is no longer on the pending queue, clear its
	 reachable flag so finalization will re-enqueue it.  */
      for (n = cgraph_nodes_queue; n; n = n->next_needed)
	if (n == node)
	  break;
      if (!n)
	node->reachable = 0;
    }
}
426
427 static void
428 cgraph_lower_function (struct cgraph_node *node)
429 {
430 if (node->lowered)
431 return;
432 tree_lowering_passes (node->decl);
433 node->lowered = true;
434 }
435
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A second definition of an already-finalized function: throw away
     the analysis of the first body (GCC redefinition extension).  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  notice_global_symbol (decl);
  node->decl = decl;
  node->local.finalized = true;
  /* A front end that already built a CFG has effectively lowered the
     body itself.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  if (node->nested)
    lower_nested_functions (decl);
  gcc_assert (!node->nested);

  /* If not unit at a time, then we need to create the call graph
     now, so that called functions can be queued and emitted now.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_analyze_function (node);
      cgraph_decide_inlining_incrementally (node, false);
    }

  if (decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If not unit at a time, go ahead and emit everything we've found
     to be reachable at this time.  Only collect garbage when the
     caller permits it (NESTED false).  */
  if (!nested)
    {
      if (!cgraph_assemble_pending_functions ())
	ggc_collect ();
    }

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);
}
490
/* Walk tree and record all calls and references to functions/variables.
   Called via walk_tree: TP is the current subtree, WALK_SUBTREES is set
   to zero to prune the walk, DATA is the walk's client data.  Returns
   non-NULL to stop the walk (only via the langhook).  */
static tree
record_reference (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
      /* ??? Really, we should mark this decl as *potentially* referenced
	 by this function and re-examine whether the decl is actually used
	 after rtl has been generated.  */
      if (TREE_STATIC (t) || DECL_EXTERNAL (t))
	{
	  varpool_mark_needed_node (varpool_node (t));
	  /* Give the front end a chance to process language-specific
	     bits of the declaration.  */
	  if (lang_hooks.callgraph.analyze_expr)
	    return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
						      data);
	}
      break;

    case FDESC_EXPR:
    case ADDR_EXPR:
      if (flag_unit_at_a_time)
	{
	  /* Record dereferences to the functions.  This makes the
	     functions reachable unconditionally.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    cgraph_mark_needed_node (cgraph_node (decl));
	}
      break;

    default:
      /* Save some cycles by not walking types and declaration as we
	 won't find anything useful there anyway.  */
      if (IS_TYPE_OR_DECL_P (*tp))
	{
	  *walk_subtrees = 0;
	  break;
	}

      /* Language-specific tree codes are delegated to the front end.  */
      if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
	return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
      break;
    }

  return NULL;
}
540
/* Create cgraph edges for function calls inside BODY from NODE.
   Also records references to variables and finalizes function-local
   statics found along the way.  */

static void
cgraph_create_edges (struct cgraph_node *node, tree body)
{
  basic_block bb;

  struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
  block_stmt_iterator bsi;
  tree step;
  /* Shared with record_reference so nodes are visited at most once
     across the whole walk.  */
  visited_nodes = pointer_set_create ();

  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  FOR_EACH_BB_FN (bb, this_cfun)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	tree stmt = bsi_stmt (bsi);
	tree call = get_call_expr_in (stmt);
	tree decl;

	if (call && (decl = get_callee_fndecl (call)))
	  {
	    /* Direct call: record the edge plus references in the
	       argument list and (for assignments) in the LHS.  */
	    cgraph_create_edge (node, cgraph_node (decl), stmt,
				bb->count,
				bb->loop_depth);
	    walk_tree (&TREE_OPERAND (call, 1),
		       record_reference, node, visited_nodes);
	    if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	      walk_tree (&GIMPLE_STMT_OPERAND (stmt, 0),
			 record_reference, node, visited_nodes);
	  }
	else
	  /* Not a direct call: scan the whole statement for references.  */
	  walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
      }

  /* Look for initializers of constant variables and private statics.  */
  for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
       step;
       step = TREE_CHAIN (step))
    {
      tree decl = TREE_VALUE (step);
      if (TREE_CODE (decl) == VAR_DECL
	  && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
	  && flag_unit_at_a_time)
	varpool_finalize_decl (decl);
      else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
	walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
    }

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
594
595 void
596 record_references_in_initializer (tree decl)
597 {
598 visited_nodes = pointer_set_create ();
599 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
600 pointer_set_destroy (visited_nodes);
601 visited_nodes = NULL;
602 }
603
604
605 /* Give initial reasons why inlining would fail. Those gets
606 either NULLified or usually overwritten by more precise reason
607 later. */
608 static void
609 initialize_inline_failed (struct cgraph_node *node)
610 {
611 struct cgraph_edge *e;
612
613 for (e = node->callers; e; e = e->next_caller)
614 {
615 gcc_assert (!e->callee->global.inlined_to);
616 gcc_assert (e->inline_failed);
617 if (node->local.redefined_extern_inline)
618 e->inline_failed = N_("redefined extern inline functions are not "
619 "considered for inlining");
620 else if (!node->local.inlinable)
621 e->inline_failed = N_("function not inlinable");
622 else
623 e->inline_failed = N_("function not considered for inlining");
624 }
625 }
626
/* Rebuild call edges from current function after passes not aware
   of cgraph updating.  Returns 0 (no TODO flags).  */
static unsigned int
rebuild_cgraph_edges (void)
{
  basic_block bb;
  struct cgraph_node *node = cgraph_node (current_function_decl);
  block_stmt_iterator bsi;

  /* Throw away the stale edges before rescanning the body.  */
  cgraph_node_remove_callees (node);

  node->count = ENTRY_BLOCK_PTR->count;

  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	tree stmt = bsi_stmt (bsi);
	tree call = get_call_expr_in (stmt);
	tree decl;

	/* Only direct calls produce edges here; indirect calls have no
	   callee decl to attach to.  */
	if (call && (decl = get_callee_fndecl (call)))
	  cgraph_create_edge (node, cgraph_node (decl), stmt,
			      bb->count,
			      bb->loop_depth);
      }
  /* Freshly created edges need their initial inline-failure reason.  */
  initialize_inline_failed (node);
  gcc_assert (!node->global.inlined_to);
  return 0;
}
656
/* Pass descriptor for rebuilding callgraph edges; unnamed so it does
   not show up in dump listings.  Requires a CFG to walk.  */
struct tree_opt_pass pass_rebuild_cgraph_edges =
{
  NULL,					/* name */
  NULL,					/* gate */
  rebuild_cgraph_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
673
/* Verify internal consistency of the callgraph data for NODE: edge aux
   fields, profile counts, inlined_to pointers, hash membership, and the
   correspondence between call statements in the body and callgraph
   edges.  Aborts via internal_error on any inconsistency.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct cgraph_node *main_clone;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  basic_block this_block;
  block_stmt_iterator bsi;
  bool error_found = false;

  /* Don't verify state derived from erroneous input.  */
  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* aux fields are scratch space used below; they must start clear.  */
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
	error ("aux field set for edge %s->%s",
	       cgraph_node_name (e->caller), cgraph_node_name (e->callee));
	error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
	{
	  error ("caller edge count is negative");
	  error_found = true;
	}
      if (!e->inline_failed)
	{
	  /* An inlined edge: NODE must be recorded as inlined into the
	     ultimate inline destination of its (single) caller.  */
	  if (node->global.inlined_to
	      != (e->caller->global.inlined_to
		  ? e->caller->global.inlined_to : e->caller))
	    {
	      error ("inlined_to pointer is wrong");
	      error_found = true;
	    }
	  if (node->callers->next_caller)
	    {
	      error ("multiple inline callers");
	      error_found = true;
	    }
	}
      else
	if (node->global.inlined_to)
	  {
	    error ("inlined_to pointer set for noninline callers");
	    error_found = true;
	  }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  /* NODE must be reachable from the master clone of its decl.  */
  for (main_clone = cgraph_node (node->decl); main_clone;
       main_clone = main_clone->next_clone)
    if (main_clone == node)
      break;
  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  /* For analyzed nodes with a body still pending output, cross-check
     every call statement against the recorded callgraph edges.  */
  if (node->analyzed
      && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
    {
      if (this_cfun->cfg)
	{
	  /* The nodes we're interested in are never shared, so walk
	     the tree ignoring duplicates.  */
	  visited_nodes = pointer_set_create ();
	  /* Reach the trees by walking over the CFG, and note the
	     enclosing basic-blocks in the call edges.  */
	  FOR_EACH_BB_FN (this_block, this_cfun)
	    for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
	      {
		tree stmt = bsi_stmt (bsi);
		tree call = get_call_expr_in (stmt);
		tree decl;
		if (call && (decl = get_callee_fndecl (call)))
		  {
		    struct cgraph_edge *e = cgraph_edge (node, stmt);
		    if (e)
		      {
			/* Mark edge as matched; a second match means the
			   call statement is shared between edges.  */
			if (e->aux)
			  {
			    error ("shared call_stmt:");
			    debug_generic_stmt (stmt);
			    error_found = true;
			  }
			if (e->callee->decl != cgraph_node (decl)->decl
			    && e->inline_failed)
			  {
			    error ("edge points to wrong declaration:");
			    debug_tree (e->callee->decl);
			    fprintf (stderr," Instead of:");
			    debug_tree (decl);
			  }
			e->aux = (void *)1;
		      }
		    else
		      {
			error ("missing callgraph edge for call stmt:");
			debug_generic_stmt (stmt);
			error_found = true;
		      }
		  }
	      }
	  pointer_set_destroy (visited_nodes);
	  visited_nodes = NULL;
	}
      else
	/* No CFG available?!  */
	gcc_unreachable ();

      /* Any callee edge left unmarked has no matching call statement.
	 Clear the aux scratch marks while checking.  */
      for (e = node->callees; e; e = e->next_callee)
	{
	  if (!e->aux)
	    {
	      error ("edge %s->%s has no corresponding call_stmt",
		     cgraph_node_name (e->caller),
		     cgraph_node_name (e->callee));
	      debug_generic_stmt (e->call_stmt);
	      error_found = true;
	    }
	  e->aux = 0;
	}
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  timevar_pop (TV_CGRAPH_VERIFY);
}
824
825 /* Verify whole cgraph structure. */
826 void
827 verify_cgraph (void)
828 {
829 struct cgraph_node *node;
830
831 if (sorrycount || errorcount)
832 return;
833
834 for (node = cgraph_nodes; node; node = node->next)
835 verify_cgraph_node (node);
836 }
837
838 /* Output all asm statements we have stored up to be output. */
839
840 static void
841 cgraph_output_pending_asms (void)
842 {
843 struct cgraph_asm_node *can;
844
845 if (errorcount || sorrycount)
846 return;
847
848 for (can = cgraph_asm_nodes; can; can = can->next)
849 assemble_asm (can->asm_str);
850 cgraph_asm_nodes = NULL;
851 }
852
/* Analyze the function scheduled to be output: lower it, build its
   callgraph edges, and fill in the local summary used by the inliner.
   Sets node->analyzed when done.  */
void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* Work inside NODE's function context; restored at the end.  */
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  cgraph_lower_function (node);

  /* First kill forward declaration so reverse inlining works properly.  */
  cgraph_create_edges (node, decl);

  node->local.estimated_self_stack_size = estimated_stack_frame_size ();
  node->global.estimated_stack_size = node->local.estimated_self_stack_size;
  node->global.stack_frame_offset = 0;
  node->local.inlinable = tree_inlinable_function_p (decl);
  /* In unit-at-a-time mode insn counts are computed later by the IPA
     passes; compute eagerly otherwise.  */
  if (!flag_unit_at_a_time)
    node->local.self_insns = estimate_num_insns (decl);
  if (node->local.inlinable)
    node->local.disregard_inline_limits
      = lang_hooks.tree_inlining.disregard_inline_limits (decl);
  initialize_inline_failed (node);
  if (flag_really_no_inline && !node->local.disregard_inline_limits)
    node->local.inlinable = 0;
  /* Inlining characteristics are maintained by the cgraph_mark_inline.  */
  node->global.insns = node->local.self_insns;

  node->analyzed = true;
  pop_cfun ();
  current_function_decl = NULL;
}
885
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  FIRST and FIRST_VAR bound the walk so that
   nodes already processed on a previous call are skipped.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
					  struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* Functions added since the last call.  */
  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
	{
	  mark_decl_referenced (decl);
	  if (node->local.finalized)
	    cgraph_mark_needed_node (node);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  /* The attribute is meaningless on non-public symbols.  */
	  if (! TREE_PUBLIC (node->decl))
	    warning (OPT_Wattributes,
		     "%J%<externally_visible%> attribute have effect only on public objects",
		     node->decl);
	  else
	    {
	      if (node->local.finalized)
		cgraph_mark_needed_node (node);
	      node->local.externally_visible = true;
	    }
	}
    }
  /* Variables added since the last call; mirrors the loop above.  */
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
	{
	  mark_decl_referenced (decl);
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  /* The attribute is meaningless on non-public symbols.  */
	  if (! TREE_PUBLIC (vnode->decl))
	    warning (OPT_Wattributes,
		     "%J%<externally_visible%> attribute have effect only on public objects",
		     vnode->decl);
	  else
	    {
	      if (vnode->finalized)
		varpool_mark_needed_node (vnode);
	      vnode->externally_visible = true;
	    }
	}
    }
}
964
/* Analyze the whole compilation unit once it is parsed completely.

   Propagates reachability from the initial entry points, analyzes and
   lowers every reachable function body, then releases the bodies of
   functions that turned out to be unreachable.  A no-op (beyond
   flushing pending output) unless compiling in unit-at-a-time mode.  */

void
cgraph_finalize_compilation_unit (void)
{
  struct cgraph_node *node, *next;
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;

  /* Nothing useful can be done once errors were seen.  */
  if (errorcount || sorrycount)
    return;

  finish_aliases_1 ();

  /* In non-unit-at-a-time mode functions were assembled as they were
     finalized; just flush whatever is still pending and return.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_output_pending_asms ();
      cgraph_assemble_pending_functions ();
      varpool_output_debug_info ();
      return;
    }

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  timevar_push (TV_CGRAPH);
  /* Process attributes seen so far; the snapshot pointers let later
     calls skip nodes already handled.  */
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_SAVED_TREE (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      gcc_assert (!node->analyzed && node->reachable);
      gcc_assert (DECL_SAVED_TREE (decl));

      cgraph_analyze_function (node);

      /* Analyzing the body may have discovered new callees; queue any
	 that are not yet marked reachable.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* We finalize local static variables during constructing callgraph
	 edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed && DECL_SAVED_TREE (node->decl))
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release the bodies of functions that were never marked reachable.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !DECL_SAVED_TREE (decl))
	cgraph_reset_node (node);

      if (!node->reachable && DECL_SAVED_TREE (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
  timevar_pop (TV_CGRAPH);
}
/* Figure out what functions we want to assemble.

   Sets NODE->output on every function that must get an out-of-line
   copy; asserts (when checking is enabled) that all other analyzed
   bodies have already been reclaimed.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->output);

      /* Look for at least one caller where the call was not inlined;
	 such a caller needs an out-of-line copy of the body.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (DECL_SAVED_TREE (decl)
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	node->output = 1;
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
}
1140
/* Expand function specified by NODE.

   Lowers the function if needed, generates RTL and assembly for it,
   and, when no inline clones still need the body, releases the tree
   representation and the call edges pointing into it.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  if (flag_unit_at_a_time)
    announce_function (decl);

  cgraph_lower_function (node);

  /* Generate RTL for the body of DECL.  */
  lang_hooks.callgraph.expand_function (decl);

  /* Make sure that BE didn't give up on compiling.  */
  /* ??? Can happen with nested function of extern inline.  */
  gcc_assert (TREE_ASM_WRITTEN (node->decl));

  current_function_decl = NULL;
  if (!cgraph_preserve_function_body_p (node->decl))
    {
      /* The body is no longer needed; release it.  */
      DECL_SAVED_TREE (node->decl) = NULL;
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
      DECL_INITIAL (node->decl) = error_mark_node;
      /* Eliminate all call edges.  This is important so the call_expr no longer
	 points to the dead function body.  */
      cgraph_node_remove_callees (node);
    }

  cgraph_function_flags_ready = true;
}
1176
1177 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1178
1179 bool
1180 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1181 {
1182 *reason = e->inline_failed;
1183 return !e->inline_failed;
1184 }
1185
1186
1187
1188 /* Expand all functions that must be output.
1189
1190 Attempt to topologically sort the nodes so function is output when
1191 all called functions are already assembled to allow data to be
1192 propagated across the callgraph. Use a stack to get smaller distance
1193 between a function and its callees (later we may choose to use a more
1194 sophisticated algorithm for function reordering; we will likely want
1195 to use subsections to make the output functions appear in top-down
1196 order). */
1197
1198 static void
1199 cgraph_expand_all_functions (void)
1200 {
1201 struct cgraph_node *node;
1202 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1203 int order_pos = 0, new_order_pos = 0;
1204 int i;
1205
1206 order_pos = cgraph_postorder (order);
1207 gcc_assert (order_pos == cgraph_n_nodes);
1208
1209 /* Garbage collector may remove inline clones we eliminate during
1210 optimization. So we must be sure to not reference them. */
1211 for (i = 0; i < order_pos; i++)
1212 if (order[i]->output)
1213 order[new_order_pos++] = order[i];
1214
1215 for (i = new_order_pos - 1; i >= 0; i--)
1216 {
1217 node = order[i];
1218 if (node->output)
1219 {
1220 gcc_assert (node->reachable);
1221 node->output = 0;
1222 cgraph_expand_function (node);
1223 }
1224 }
1225 cgraph_process_new_functions ();
1226
1227 free (order);
1228
1229 }
1230
/* This is used to sort the node types by the cgraph order number.  */

struct cgraph_order_sort
{
  /* Which kind of toplevel entity occupies this order slot.  */
  enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
  /* The entity itself; the valid member is selected by KIND.  */
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};
1243
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Build a table indexed by source order number.  The asserts below
     check that each order slot is claimed by at most one entity.  */
  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->output)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* Emit everything in original declaration order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->output = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* All pending asm statements have been emitted above.  */
  cgraph_asm_nodes = NULL;
}
1322
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.  */

static void
cgraph_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* A reachable COMDAT function, or (outside -fwhole-program) any
	 reachable public non-external function, may be referenced from
	 other units.  */
      if (node->reachable
	  && (DECL_COMDAT (node->decl)
	      || (!flag_whole_program
		  && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
	node->local.externally_visible = true;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
	  TREE_PUBLIC (node->decl) = 0;
	}
      node->local.local = (!node->needed
			   && node->analyzed
			   && !DECL_EXTERNAL (node->decl)
			   && !node->local.externally_visible);
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (vnode->needed
	  && !flag_whole_program
	  && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
	vnode->externally_visible = 1;
      if (!vnode->externally_visible)
	{
	  gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
	  TREE_PUBLIC (vnode->decl) = 0;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }

  /* Because we have to be conservative on the boundaries of source
     level units, it is possible that we marked some functions in
     reachable just because they might be used later via external
     linkage, but after making them local they are really unreachable
     now.  */
  cgraph_remove_unreachable_nodes (true, cgraph_dump_file);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
      fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
}
1394
1395 /* Return true when function body of DECL still needs to be kept around
1396 for later re-use. */
1397 bool
1398 cgraph_preserve_function_body_p (tree decl)
1399 {
1400 struct cgraph_node *node;
1401 if (!cgraph_global_info_ready)
1402 return (flag_really_no_inline
1403 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1404 : DECL_INLINE (decl));
1405 /* Look if there is any clone around. */
1406 for (node = cgraph_node (decl); node; node = node->next_clone)
1407 if (node->global.inlined_to)
1408 return true;
1409 return false;
1410 }
1411
/* Run the whole-program (IPA) pass list.  IPA passes operate outside
   the context of any single function, so the current function state is
   cleared first.  */

static void
ipa_passes (void)
{
  cfun = NULL;
  current_function_decl = NULL;
  tree_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_ipa_passes);
  bitmap_obstack_release (NULL);
}
1422
/* Perform simple optimizations based on callgraph.

   Main driver for the whole-unit part of compilation: decides symbol
   visibility, runs the IPA passes, then expands and assembles all
   functions, variables and asm statements.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  /* In non-unit-at-a-time mode functions were already assembled as
     they were finalized; only flush the leftovers.  */
  if (!flag_unit_at_a_time)
    {
      cgraph_assemble_pending_functions ();
      cgraph_process_new_functions ();
      cgraph_state = CGRAPH_STATE_FINISHED;
      cgraph_output_pending_asms ();
      varpool_assemble_pending_decls ();
      varpool_output_debug_info ();
      return;
    }

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();
  cgraph_process_new_functions ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");

  cgraph_function_and_variable_visibility ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Marked ");
      dump_cgraph (cgraph_dump_file);
    }
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_increase_alignment ();
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
      varpool_output_debug_info ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (flag_unit_at_a_time
      && !(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| DECL_SAVED_TREE (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with no released memory found");
    }
#endif
}
1542
1543 /* Increase alignment of global arrays to improve vectorization potential.
1544 TODO:
1545 - Consider also structs that have an array field.
1546 - Use ipa analysis to prune arrays that can't be vectorized?
1547 This should involve global alignment analysis and in the future also
1548 array padding. */
1549
1550 static void
1551 cgraph_increase_alignment (void)
1552 {
1553 if (flag_section_anchors && flag_tree_vectorize)
1554 {
1555 struct varpool_node *vnode;
1556
1557 /* Increase the alignment of all global arrays for vectorization. */
1558 for (vnode = varpool_nodes_queue;
1559 vnode;
1560 vnode = vnode->next_needed)
1561 {
1562 tree vectype, decl = vnode->decl;
1563 unsigned int alignment;
1564
1565 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
1566 continue;
1567 vectype = get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl)));
1568 if (!vectype)
1569 continue;
1570 alignment = TYPE_ALIGN (vectype);
1571 if (DECL_ALIGN (decl) >= alignment)
1572 continue;
1573
1574 if (vect_can_force_dr_alignment_p (decl, alignment))
1575 {
1576 DECL_ALIGN (decl) = TYPE_ALIGN (vectype);
1577 DECL_USER_ALIGN (decl) = 1;
1578 if (cgraph_dump_file)
1579 {
1580 fprintf (cgraph_dump_file, "Increasing alignment of decl: ");
1581 print_generic_expr (cgraph_dump_file, decl, TDF_SLIM);
1582 }
1583 }
1584 }
1585 }
1586 }
1587
/* Generate and emit a static constructor or destructor.  WHICH must be
   one of 'I' (constructor) or 'D' (destructor).  BODY should be a
   STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   priority passed to the target's constructor/destructor hook when one
   is available.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* Build a unique per-file function name from WHICH and a counter.  */
  sprintf (which_buf, "%c_%d", which, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (FUNCTION_DECL, name,
		     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  /* The function returns void.  */
  resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_IGNORED_P (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;

  allocate_struct_function (decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* When the target lacks native ctor/dtor support the function must be
     visible to the linker, hence public.  */
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));

  /* Register the function with the target's native ctor/dtor mechanism
     when one exists.  */
  if (targetm.have_ctors_dtors)
    {
      void (*fn) (rtx, int);

      if (which == 'I')
	fn = targetm.asm_out.constructor;
      else
	fn = targetm.asm_out.destructor;
      fn (XEXP (DECL_RTL (decl), 0), priority);
    }
}
1656
/* Initialize the callgraph module: open the cgraph dump file when
   dumping was requested.  */

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
1662
1663 /* The edges representing the callers of the NEW_VERSION node were
1664 fixed by cgraph_function_versioning (), now the call_expr in their
1665 respective tree code should be updated to call the NEW_VERSION. */
1666
1667 static void
1668 update_call_expr (struct cgraph_node *new_version)
1669 {
1670 struct cgraph_edge *e;
1671
1672 gcc_assert (new_version);
1673 for (e = new_version->callers; e; e = e->next_caller)
1674 /* Update the call expr on the edges
1675 to call the new version. */
1676 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1677 }
1678
1679
1680 /* Create a new cgraph node which is the new version of
1681 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1682 edges which should be redirected to point to
1683 NEW_VERSION. ALL the callees edges of OLD_VERSION
1684 are cloned to the new version node. Return the new
1685 version node. */
1686
1687 static struct cgraph_node *
1688 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1689 tree new_decl,
1690 VEC(cgraph_edge_p,heap) *redirect_callers)
1691 {
1692 struct cgraph_node *new_version;
1693 struct cgraph_edge *e, *new_e;
1694 struct cgraph_edge *next_callee;
1695 unsigned i;
1696
1697 gcc_assert (old_version);
1698
1699 new_version = cgraph_node (new_decl);
1700
1701 new_version->analyzed = true;
1702 new_version->local = old_version->local;
1703 new_version->global = old_version->global;
1704 new_version->rtl = new_version->rtl;
1705 new_version->reachable = true;
1706 new_version->count = old_version->count;
1707
1708 /* Clone the old node callees. Recursive calls are
1709 also cloned. */
1710 for (e = old_version->callees;e; e=e->next_callee)
1711 {
1712 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1713 new_e->count = e->count;
1714 }
1715 /* Fix recursive calls.
1716 If OLD_VERSION has a recursive call after the
1717 previous edge cloning, the new version will have an edge
1718 pointing to the old version, which is wrong;
1719 Redirect it to point to the new version. */
1720 for (e = new_version->callees ; e; e = next_callee)
1721 {
1722 next_callee = e->next_callee;
1723 if (e->callee == old_version)
1724 cgraph_redirect_edge_callee (e, new_version);
1725
1726 if (!next_callee)
1727 break;
1728 }
1729 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1730 {
1731 /* Redirect calls to the old version node to point to its new
1732 version. */
1733 cgraph_redirect_edge_callee (e, new_version);
1734 }
1735
1736 return new_version;
1737 }
1738
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   Returns NULL when OLD_VERSION_NODE's function cannot be versioned.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    varray_type tree_map)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  new_decl = copy_node (old_decl);

  /* Create the new version's call-graph node.
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false);
  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_version_node->decl) = 0;
  DECL_ONE_ONLY (new_version_node->decl) = 0;
  TREE_PUBLIC (new_version_node->decl) = 0;
  DECL_COMDAT (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  return new_version_node;
}
1792
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  NODE must be the master node of the
   function (the node returned by cgraph_node for its decl).  Returns the
   clone that keeps the saved body.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  /* In non-unit-at-a-time we construct full fledged clone we never output to
     assembly file.  This clone is pointed out by inline_decl of original function
     and inlining infrastructure knows how to deal with this.  */
  if (!flag_unit_at_a_time)
    {
      struct cgraph_edge *e;

      first_clone = cgraph_clone_node (node, node->count, 0, false);
      first_clone->needed = 0;
      first_clone->reachable = 1;
      /* Recursively clone all bodies.  */
      for (e = first_clone->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  cgraph_clone_inlined_nodes (e, true, false);
    }
  else
    first_clone = node->next_clone;

  /* The saved-body clone gets its own decl; detach it from NODE's clone
     list and register it in the decl hashtable.  */
  first_clone->decl = copy_node (node->decl);
  node->next_clone = NULL;
  if (!flag_unit_at_a_time)
    node->inline_decl = first_clone->decl;
  first_clone->prev_clone = NULL;
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true);

  /* The saved body is private to this unit.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_ONE_ONLY (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;

  /* Make the remaining clones share the copied decl.  */
  for (node = first_clone->next_clone; node; node = node->next_clone)
    node->decl = first_clone->decl;
#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}