1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well
23 as a few basic interprocedural optimizers.
24
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of the
33 function and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 functions.)
37
38 - varpool_finalize_decl
39
40 This function has the same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once the (source level) compilation unit is
46 finalized and will no longer change.
47
48 In unit-at-a-time mode the call-graph construction and local function
49 analysis take place here. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (as happens in the C front end).
54
55 - cgraph_optimize
56
57 In unit-at-a-time compilation the intraprocedural analysis takes place
58 here. In particular, static functions whose address is never taken are
59 marked as local. The back end can then use this information to modify
60 calling conventions, do better inlining, or perform similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function, and all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones, or alternatively for marking the
75 callgraph and varpool nodes referenced by them as needed.
76
77 ??? On tree-ssa, genericizing should take place here and we would avoid
78 the need for these hooks (replacing them by a genericizing hook).
79
80 - expand_function callback
81
82 This function is used to expand a function and pass it to the RTL back end.
83 The front end should not make any assumptions about when this function can
84 be called. In particular cgraph_assemble_pending_functions,
85 varpool_assemble_pending_decls, cgraph_finalize_function,
86 varpool_finalize_decl and cgraph_optimize can cause arbitrary
87 previously finalized functions to be expanded.
88
89 We implement two compilation modes.
90
91 - unit-at-a-time: In this mode the analysis of all functions is deferred
92 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
93
94 In cgraph_finalize_compilation_unit the reachable functions are
95 analyzed. During analysis the call-graph edges from reachable
96 functions are constructed and their destinations are marked as
97 reachable. References to functions and variables are discovered too,
98 and variables found to be needed are output to the assembly file. Via
99 the mark_referenced call in assemble_variable, functions referenced by
100 static variables are noticed too.
101
102 The intra-procedural information is produced and its existence is
103 indicated by global_info_ready. Once this flag is set it is impossible
104 to change a function from !reachable to reachable, and thus
105 assemble_variable no longer calls mark_referenced.
106
107 Finally the call-graph is topologically sorted and all reachable functions
108 that have not been completely inlined and are not external are output.
109
110 ??? It is possible that a reference to a function or variable is optimized
111 out. We cannot deal with this nicely because the topological order is not
112 suitable for it. For tree-ssa we may consider another pass doing
113 optimization and re-discovering the reachable functions.
114
115 ??? Reorganize the code so variables are output very last and only if they
116 really have been referenced by the produced code, so we catch more cases
117 where the reference has been optimized out.
118
119 - non-unit-at-a-time
120
121 All functions and variables are output as early as possible to conserve
122 memory consumption. This may or may not result in less memory being used,
123 but it is still needed for some legacy code that relies on a particular
124 ordering of the compiler's output.
125
126 Varpool data structures are not used and variables are output directly.
127
128 Functions are output early via a call to
129 cgraph_assemble_pending_functions from cgraph_finalize_function. The
130 decision on whether a function is needed is made more conservatively, so
131 uninlinable static functions are needed too. During the call-graph
132 construction the edge destinations are not marked as reachable; it is
133 left entirely to assemble_variable to mark them. */
134
135
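/* As an illustrative sketch only (the exact sequence is front-end specific),
   a front end in unit-at-a-time mode is expected to drive this module
   roughly as follows: call

	cgraph_finalize_function (fndecl, false);

   for every function definition whose body is complete, call

	varpool_finalize_decl (vardecl);

   for every static variable, then

	cgraph_finalize_compilation_unit ();

   once the whole (source level) compilation unit has been parsed, and
   finally

	cgraph_optimize ();

   after all units have been finalized.  FNDECL and VARDECL stand for the
   front end's own declarations here.  */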
136 #include "config.h"
137 #include "system.h"
138 #include "coretypes.h"
139 #include "tm.h"
140 #include "tree.h"
141 #include "rtl.h"
142 #include "tree-flow.h"
143 #include "tree-inline.h"
144 #include "langhooks.h"
145 #include "pointer-set.h"
146 #include "toplev.h"
147 #include "flags.h"
148 #include "ggc.h"
149 #include "debug.h"
150 #include "target.h"
151 #include "cgraph.h"
152 #include "diagnostic.h"
153 #include "timevar.h"
154 #include "params.h"
155 #include "fibheap.h"
156 #include "c-common.h"
157 #include "intl.h"
158 #include "function.h"
159 #include "ipa-prop.h"
160 #include "tree-gimple.h"
161 #include "tree-pass.h"
162 #include "output.h"
163
164 static void cgraph_expand_all_functions (void);
165 static void cgraph_mark_functions_to_output (void);
166 static void cgraph_expand_function (struct cgraph_node *);
167 static tree record_reference (tree *, int *, void *);
168 static void cgraph_output_pending_asms (void);
169 static void cgraph_increase_alignment (void);
170 static void initialize_inline_failed (struct cgraph_node *);
171
172 /* Records tree nodes seen in record_reference. Simply using
173 walk_tree_without_duplicates doesn't guarantee each node is visited
174 once because it gets a new htab upon each recursive call from
175 record_reference itself. */
176 static struct pointer_set_t *visited_nodes;
177
178 static FILE *cgraph_dump_file;
179
180 /* Determine if function DECL is needed. That is, visible to something
181 either outside this translation unit, something magic in the system
182 configury, or (if not doing unit-at-a-time) to something we haven't
183 seen yet. */
184
185 static bool
186 decide_is_function_needed (struct cgraph_node *node, tree decl)
187 {
188 tree origin;
189 if (MAIN_NAME_P (DECL_NAME (decl))
190 && TREE_PUBLIC (decl))
191 {
192 node->local.externally_visible = true;
193 return true;
194 }
195
196 /* If the user told us it is used, then it must be so. */
197 if (node->local.externally_visible)
198 return true;
199
200 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
201 return true;
202
203 /* ??? If the assembler name is set by hand, it is possible to assemble
204 the name later after finalizing the function and the fact is noticed
205 in assemble_name then. This is arguably a bug. */
206 if (DECL_ASSEMBLER_NAME_SET_P (decl)
207 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
208 return true;
209
210 /* If we decided it was needed before, but at the time we didn't have
211 the body of the function available, then it's still needed. We have
212 to go back and re-check its dependencies now. */
213 if (node->needed)
214 return true;
215
216 /* Externally visible functions must be output. The exception is
217 COMDAT functions that must be output only when they are needed.
218
219 When not optimizing, also output the static functions (see
220 PR24561), but don't do so for always_inline functions, functions
221 declared inline and nested functions. These were optimized out
222 in the original implementation and it is unclear whether we want
223 to change the behavior here. */
224 if (((TREE_PUBLIC (decl)
225 || (!optimize && !node->local.disregard_inline_limits
226 && !DECL_DECLARED_INLINE_P (decl)
227 && !node->origin))
228 && !flag_whole_program)
229 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
230 return true;
231
232 /* Constructors and destructors are reachable from the runtime by
233 some mechanism. */
234 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
235 return true;
236
237 if (flag_unit_at_a_time)
238 return false;
239
240 /* If not doing unit-at-a-time, then we'll only defer this function
241 if it's marked for inlining. Otherwise we want to emit it now. */
242
243 /* "extern inline" functions are never output locally. */
244 if (DECL_EXTERNAL (decl))
245 return false;
246 /* Nested functions of an extern inline function shall not be emitted
247 unless we inlined the origin. */
248 for (origin = decl_function_context (decl); origin;
249 origin = decl_function_context (origin))
250 if (DECL_EXTERNAL (origin))
251 return false;
252 /* We want to emit COMDAT functions only when absolutely necessary. */
253 if (DECL_COMDAT (decl))
254 return false;
255 if (!DECL_INLINE (decl)
256 || (!node->local.disregard_inline_limits
257 /* When declared inline, defer even the uninlinable functions.
258 This allows them to be eliminated when unused. */
259 && !DECL_DECLARED_INLINE_P (decl)
260 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
261 return true;
262
263 return false;
264 }
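
/* For illustration (hypothetical user-level input, assuming unit-at-a-time
   with optimization enabled and without -fwhole-program): given

	static int helper (int x) { return x + 1; }
	int entry (int x) { return helper (x); }
	static void boot (void) __attribute__((constructor));

   only entry (TREE_PUBLIC, neither COMDAT nor external) and boot (a static
   constructor) are considered needed by the function above; helper becomes
   reachable only through the call-graph edge from entry.  */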
265
266 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
267 functions into the callgraph so that they look like ordinary reachable
268 functions inserted into the callgraph at construction time. */
269
270 bool
271 cgraph_process_new_functions (void)
272 {
273 bool output = false;
274 tree fndecl;
275 struct cgraph_node *node;
276
277 /* Note that this queue may grow as it is being processed, as the new
278 functions may generate new ones. */
279 while (cgraph_new_nodes)
280 {
281 node = cgraph_new_nodes;
282 fndecl = node->decl;
283 cgraph_new_nodes = cgraph_new_nodes->next_needed;
284 switch (cgraph_state)
285 {
286 case CGRAPH_STATE_CONSTRUCTION:
287 /* At construction time we just need to finalize the function and move
288 it into the reachable functions list. */
289
290 node->next_needed = NULL;
291 cgraph_finalize_function (fndecl, false);
292 cgraph_mark_reachable_node (node);
293 output = true;
294 break;
295
296 case CGRAPH_STATE_IPA:
297 case CGRAPH_STATE_IPA_SSA:
298 /* When IPA optimization has already started, do all essential
299 transformations that have already been performed on the whole
300 cgraph but not on this function. */
301
302 tree_register_cfg_hooks ();
303 if (!node->analyzed)
304 cgraph_analyze_function (node);
305 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
306 current_function_decl = fndecl;
307 node->local.inlinable = tree_inlinable_function_p (fndecl);
308 node->local.self_insns = estimate_num_insns (fndecl);
309 node->local.disregard_inline_limits
310 = lang_hooks.tree_inlining.disregard_inline_limits (fndecl);
311 /* Inlining characteristics are maintained by the
312 cgraph_mark_inline. */
313 node->global.insns = node->local.self_insns;
314 initialize_inline_failed (node);
315 if (flag_really_no_inline && !node->local.disregard_inline_limits)
316 node->local.inlinable = 0;
317 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
318 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
319 /* When not optimizing, be sure we run early local passes anyway
320 to expand OMP. */
321 || !optimize)
322 execute_pass_list (pass_early_local_passes.sub);
323 free_dominance_info (CDI_POST_DOMINATORS);
324 free_dominance_info (CDI_DOMINATORS);
325 pop_cfun ();
326 current_function_decl = NULL;
327 break;
328
329 case CGRAPH_STATE_EXPANSION:
330 /* Functions created during expansion shall be compiled
331 directly. */
332 node->output = 0;
333 cgraph_expand_function (node);
334 break;
335
336 default:
337 gcc_unreachable ();
338 break;
339 }
340 }
341 return output;
342 }
343
344 /* When not doing unit-at-a-time, output all functions enqueued.
345 Return true when such functions were found. */
346
347 static bool
348 cgraph_assemble_pending_functions (void)
349 {
350 bool output = false;
351
352 if (flag_unit_at_a_time)
353 return false;
354
355 cgraph_output_pending_asms ();
356
357 while (cgraph_nodes_queue)
358 {
359 struct cgraph_node *n = cgraph_nodes_queue;
360
361 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
362 n->next_needed = NULL;
363 if (!n->global.inlined_to
364 && !n->alias
365 && !DECL_EXTERNAL (n->decl))
366 {
367 cgraph_expand_function (n);
368 output = true;
369 }
370 output |= cgraph_process_new_functions ();
371 }
372
373 return output;
374 }
375
376
377 /* As a GCC extension we allow redefinition of the function. The
378 semantics when the two bodies differ are not well defined.
379 We replace the old body with the new body, so in unit-at-a-time mode
380 we always use the new body, while in normal mode we may end up with
381 the old body inlined into some functions and the new body expanded and
382 inlined in others.
383
384 ??? It may make more sense to use one body for inlining and the other
385 body for expanding the function, but this is difficult to do. */
386
387 static void
388 cgraph_reset_node (struct cgraph_node *node)
389 {
390 /* If node->output is set, then this is a unit-at-a-time compilation
391 and we have already begun whole-unit analysis. This is *not*
392 testing for whether we've already emitted the function. That
393 case can be sort-of legitimately seen with real function
394 redefinition errors. I would argue that the front end should
395 never present us with such a case, but don't enforce that for now. */
396 gcc_assert (!node->output);
397
398 /* Reset our data structures so we can analyze the function again. */
399 memset (&node->local, 0, sizeof (node->local));
400 memset (&node->global, 0, sizeof (node->global));
401 memset (&node->rtl, 0, sizeof (node->rtl));
402 node->analyzed = false;
403 node->local.redefined_extern_inline = true;
404 node->local.finalized = false;
405
406 if (!flag_unit_at_a_time)
407 {
408 struct cgraph_node *n, *next;
409
410 for (n = cgraph_nodes; n; n = next)
411 {
412 next = n->next;
413 if (n->global.inlined_to == node)
414 cgraph_remove_node (n);
415 }
416 }
417
418 cgraph_node_remove_callees (node);
419
420 /* We may need to re-queue the node for assembling in case
421 we already processed it and ignored it as not needed. */
422 if (node->reachable && !flag_unit_at_a_time)
423 {
424 struct cgraph_node *n;
425
426 for (n = cgraph_nodes_queue; n; n = n->next_needed)
427 if (n == node)
428 break;
429 if (!n)
430 node->reachable = 0;
431 }
432 }
433
434 static void
435 cgraph_lower_function (struct cgraph_node *node)
436 {
437 if (node->lowered)
438 return;
439 tree_lowering_passes (node->decl);
440 node->lowered = true;
441 }
442
443 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
444 logic in effect. If NESTED is true, then our caller cannot stand to have
445 the garbage collector run at the moment. We would need to either create
446 a new GC context, or just not compile right now. */
447
448 void
449 cgraph_finalize_function (tree decl, bool nested)
450 {
451 struct cgraph_node *node = cgraph_node (decl);
452
453 if (node->local.finalized)
454 cgraph_reset_node (node);
455
456 notice_global_symbol (decl);
457 node->decl = decl;
458 node->local.finalized = true;
459 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
460 if (node->nested)
461 lower_nested_functions (decl);
462 gcc_assert (!node->nested);
463
464 /* If not unit at a time, then we need to create the call graph
465 now, so that called functions can be queued and emitted now. */
466 if (!flag_unit_at_a_time)
467 {
468 cgraph_analyze_function (node);
469 cgraph_decide_inlining_incrementally (node, false);
470 }
471
472 if (decide_is_function_needed (node, decl))
473 cgraph_mark_needed_node (node);
474
475 /* Since we reclaim unreachable nodes at the end of every language
476 level unit, we need to be conservative about possible entry points
477 there. */
478 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
479 cgraph_mark_reachable_node (node);
480
481 /* If not unit at a time, go ahead and emit everything we've found
482 to be reachable at this time. */
483 if (!nested)
484 {
485 if (!cgraph_assemble_pending_functions ())
486 ggc_collect ();
487 }
488
489 /* If we've not yet emitted decl, tell the debug info about it. */
490 if (!TREE_ASM_WRITTEN (decl))
491 (*debug_hooks->deferred_inline_function) (decl);
492
493 /* Possibly warn about unused parameters. */
494 if (warn_unused_parameter)
495 do_warn_unused_parameter (decl);
496 }
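
/* A minimal front-end usage sketch (hypothetical caller): once the parser
   has finished the body of DECL it simply does

	cgraph_finalize_function (decl, false);

   passing NESTED as true only when finalizing a function while its
   containing function is still being parsed, so that no garbage collection
   is triggered here.  */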
497
498 /* Walk the tree and record references to functions and variables. Called via walk_tree. */
499 static tree
500 record_reference (tree *tp, int *walk_subtrees, void *data)
501 {
502 tree t = *tp;
503
504 switch (TREE_CODE (t))
505 {
506 case VAR_DECL:
507 /* ??? Really, we should mark this decl as *potentially* referenced
508 by this function and re-examine whether the decl is actually used
509 after rtl has been generated. */
510 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
511 {
512 varpool_mark_needed_node (varpool_node (t));
513 if (lang_hooks.callgraph.analyze_expr)
514 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
515 data);
516 }
517 break;
518
519 case FDESC_EXPR:
520 case ADDR_EXPR:
521 if (flag_unit_at_a_time)
522 {
523 /* Record dereferences to the functions. This makes the
524 functions reachable unconditionally. */
525 tree decl = TREE_OPERAND (*tp, 0);
526 if (TREE_CODE (decl) == FUNCTION_DECL)
527 cgraph_mark_needed_node (cgraph_node (decl));
528 }
529 break;
530
531 default:
532 /* Save some cycles by not walking types and declarations, as we
533 won't find anything useful there anyway. */
534 if (IS_TYPE_OR_DECL_P (*tp))
535 {
536 *walk_subtrees = 0;
537 break;
538 }
539
540 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
541 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
542 break;
543 }
544
545 return NULL;
546 }
547
548 /* Create cgraph edges for function calls inside BODY from NODE. */
549
550 static void
551 cgraph_create_edges (struct cgraph_node *node, tree body)
552 {
553 basic_block bb;
554
555 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
556 block_stmt_iterator bsi;
557 tree step;
558 visited_nodes = pointer_set_create ();
559
560 /* Reach the trees by walking over the CFG, and note the
561 enclosing basic-blocks in the call edges. */
562 FOR_EACH_BB_FN (bb, this_cfun)
563 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
564 {
565 tree stmt = bsi_stmt (bsi);
566 tree call = get_call_expr_in (stmt);
567 tree decl;
568
569 if (call && (decl = get_callee_fndecl (call)))
570 {
571 cgraph_create_edge (node, cgraph_node (decl), stmt,
572 bb->count,
573 bb->loop_depth);
574 walk_tree (&TREE_OPERAND (call, 1),
575 record_reference, node, visited_nodes);
576 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
577 walk_tree (&GIMPLE_STMT_OPERAND (stmt, 0),
578 record_reference, node, visited_nodes);
579 }
580 else
581 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
582 }
583
584 /* Look for initializers of constant variables and private statics. */
585 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
586 step;
587 step = TREE_CHAIN (step))
588 {
589 tree decl = TREE_VALUE (step);
590 if (TREE_CODE (decl) == VAR_DECL
591 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
592 && flag_unit_at_a_time)
593 varpool_finalize_decl (decl);
594 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
595 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
596 }
597
598 pointer_set_destroy (visited_nodes);
599 visited_nodes = NULL;
600 }
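
/* For illustration (hypothetical input): for a function such as

	int g (void) { return f (1) + f (2); }

   two call-graph edges g->f are created, each annotated with its call
   statement, the enclosing basic block's profile count and its loop depth,
   while record_reference picks up variables and function addresses
   mentioned in the call arguments and stored values.  */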
601
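/* Record references to functions and other variables present in the
   initializer of DECL, a variable.  */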
602 void
603 record_references_in_initializer (tree decl)
604 {
605 visited_nodes = pointer_set_create ();
606 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
607 pointer_set_destroy (visited_nodes);
608 visited_nodes = NULL;
609 }
610
611
612 /* Give initial reasons why inlining would fail. These get
613 either NULLified or, more usually, overwritten by a more precise
614 reason later. */
615 static void
616 initialize_inline_failed (struct cgraph_node *node)
617 {
618 struct cgraph_edge *e;
619
620 for (e = node->callers; e; e = e->next_caller)
621 {
622 gcc_assert (!e->callee->global.inlined_to);
623 gcc_assert (e->inline_failed);
624 if (node->local.redefined_extern_inline)
625 e->inline_failed = N_("redefined extern inline functions are not "
626 "considered for inlining");
627 else if (!node->local.inlinable)
628 e->inline_failed = N_("function not inlinable");
629 else
630 e->inline_failed = N_("function not considered for inlining");
631 }
632 }
633
634 /* Rebuild call edges for the current function after passes that are
635 not aware of cgraph updating. */
636 static unsigned int
637 rebuild_cgraph_edges (void)
638 {
639 basic_block bb;
640 struct cgraph_node *node = cgraph_node (current_function_decl);
641 block_stmt_iterator bsi;
642
643 cgraph_node_remove_callees (node);
644
645 node->count = ENTRY_BLOCK_PTR->count;
646
647 FOR_EACH_BB (bb)
648 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
649 {
650 tree stmt = bsi_stmt (bsi);
651 tree call = get_call_expr_in (stmt);
652 tree decl;
653
654 if (call && (decl = get_callee_fndecl (call)))
655 cgraph_create_edge (node, cgraph_node (decl), stmt,
656 bb->count,
657 bb->loop_depth);
658 }
659 initialize_inline_failed (node);
660 gcc_assert (!node->global.inlined_to);
661 return 0;
662 }
663
664 struct tree_opt_pass pass_rebuild_cgraph_edges =
665 {
666 NULL, /* name */
667 NULL, /* gate */
668 rebuild_cgraph_edges, /* execute */
669 NULL, /* sub */
670 NULL, /* next */
671 0, /* static_pass_number */
672 0, /* tv_id */
673 PROP_cfg, /* properties_required */
674 0, /* properties_provided */
675 0, /* properties_destroyed */
676 0, /* todo_flags_start */
677 0, /* todo_flags_finish */
678 0 /* letter */
679 };
680
681 /* Verify the consistency of cgraph node NODE. */
682 void
683 verify_cgraph_node (struct cgraph_node *node)
684 {
685 struct cgraph_edge *e;
686 struct cgraph_node *main_clone;
687 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
688 basic_block this_block;
689 block_stmt_iterator bsi;
690 bool error_found = false;
691
692 if (errorcount || sorrycount)
693 return;
694
695 timevar_push (TV_CGRAPH_VERIFY);
696 for (e = node->callees; e; e = e->next_callee)
697 if (e->aux)
698 {
699 error ("aux field set for edge %s->%s",
700 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
701 error_found = true;
702 }
703 if (node->count < 0)
704 {
705 error ("execution count is negative");
706 error_found = true;
707 }
708 for (e = node->callers; e; e = e->next_caller)
709 {
710 if (e->count < 0)
711 {
712 error ("caller edge count is negative");
713 error_found = true;
714 }
715 if (!e->inline_failed)
716 {
717 if (node->global.inlined_to
718 != (e->caller->global.inlined_to
719 ? e->caller->global.inlined_to : e->caller))
720 {
721 error ("inlined_to pointer is wrong");
722 error_found = true;
723 }
724 if (node->callers->next_caller)
725 {
726 error ("multiple inline callers");
727 error_found = true;
728 }
729 }
730 else
731 if (node->global.inlined_to)
732 {
733 error ("inlined_to pointer set for noninline callers");
734 error_found = true;
735 }
736 }
737 if (!node->callers && node->global.inlined_to)
738 {
739 error ("inlined_to pointer is set but no predecessors found");
740 error_found = true;
741 }
742 if (node->global.inlined_to == node)
743 {
744 error ("inlined_to pointer refers to itself");
745 error_found = true;
746 }
747
748 for (main_clone = cgraph_node (node->decl); main_clone;
749 main_clone = main_clone->next_clone)
750 if (main_clone == node)
751 break;
752 if (!cgraph_node (node->decl))
753 {
754 error ("node not found in cgraph_hash");
755 error_found = true;
756 }
757
758 if (node->analyzed
759 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
760 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
761 {
762 if (this_cfun->cfg)
763 {
764 /* The nodes we're interested in are never shared, so walk
765 the tree ignoring duplicates. */
766 visited_nodes = pointer_set_create ();
767 /* Reach the trees by walking over the CFG, and note the
768 enclosing basic-blocks in the call edges. */
769 FOR_EACH_BB_FN (this_block, this_cfun)
770 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
771 {
772 tree stmt = bsi_stmt (bsi);
773 tree call = get_call_expr_in (stmt);
774 tree decl;
775 if (call && (decl = get_callee_fndecl (call)))
776 {
777 struct cgraph_edge *e = cgraph_edge (node, stmt);
778 if (e)
779 {
780 if (e->aux)
781 {
782 error ("shared call_stmt:");
783 debug_generic_stmt (stmt);
784 error_found = true;
785 }
786 if (e->callee->decl != cgraph_node (decl)->decl
787 && e->inline_failed)
788 {
789 error ("edge points to wrong declaration:");
790 debug_tree (e->callee->decl);
791 fprintf (stderr," Instead of:");
792 debug_tree (decl);
793 }
794 e->aux = (void *)1;
795 }
796 else
797 {
798 error ("missing callgraph edge for call stmt:");
799 debug_generic_stmt (stmt);
800 error_found = true;
801 }
802 }
803 }
804 pointer_set_destroy (visited_nodes);
805 visited_nodes = NULL;
806 }
807 else
808 /* No CFG available?! */
809 gcc_unreachable ();
810
811 for (e = node->callees; e; e = e->next_callee)
812 {
813 if (!e->aux)
814 {
815 error ("edge %s->%s has no corresponding call_stmt",
816 cgraph_node_name (e->caller),
817 cgraph_node_name (e->callee));
818 debug_generic_stmt (e->call_stmt);
819 error_found = true;
820 }
821 e->aux = 0;
822 }
823 }
824 if (error_found)
825 {
826 dump_cgraph_node (stderr, node);
827 internal_error ("verify_cgraph_node failed");
828 }
829 timevar_pop (TV_CGRAPH_VERIFY);
830 }
831
832 /* Verify whole cgraph structure. */
833 void
834 verify_cgraph (void)
835 {
836 struct cgraph_node *node;
837
838 if (sorrycount || errorcount)
839 return;
840
841 for (node = cgraph_nodes; node; node = node->next)
842 verify_cgraph_node (node);
843 }
844
845 /* Output all asm statements we have stored up to be output. */
846
847 static void
848 cgraph_output_pending_asms (void)
849 {
850 struct cgraph_asm_node *can;
851
852 if (errorcount || sorrycount)
853 return;
854
855 for (can = cgraph_asm_nodes; can; can = can->next)
856 assemble_asm (can->asm_str);
857 cgraph_asm_nodes = NULL;
858 }
859
860 /* Analyze the function scheduled to be output. */
861 void
862 cgraph_analyze_function (struct cgraph_node *node)
863 {
864 tree decl = node->decl;
865
866 current_function_decl = decl;
867 push_cfun (DECL_STRUCT_FUNCTION (decl));
868 cgraph_lower_function (node);
869
870 /* First kill forward declaration so reverse inlining works properly. */
871 cgraph_create_edges (node, decl);
872
873 node->local.estimated_self_stack_size = estimated_stack_frame_size ();
874 node->global.estimated_stack_size = node->local.estimated_self_stack_size;
875 node->global.stack_frame_offset = 0;
876 node->local.inlinable = tree_inlinable_function_p (decl);
877 if (!flag_unit_at_a_time)
878 node->local.self_insns = estimate_num_insns (decl);
879 if (node->local.inlinable)
880 node->local.disregard_inline_limits
881 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
882 initialize_inline_failed (node);
883 if (flag_really_no_inline && !node->local.disregard_inline_limits)
884 node->local.inlinable = 0;
885 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
886 node->global.insns = node->local.self_insns;
887 if (!flag_unit_at_a_time)
888 {
889 bitmap_obstack_initialize (NULL);
890 tree_register_cfg_hooks ();
891 execute_pass_list (pass_early_local_passes.sub);
892 free_dominance_info (CDI_POST_DOMINATORS);
893 free_dominance_info (CDI_DOMINATORS);
894 bitmap_obstack_release (NULL);
895 }
896
897 node->analyzed = true;
898 pop_cfun ();
899 current_function_decl = NULL;
900 }
901
902 /* Look for externally_visible and used attributes and mark cgraph nodes
903 accordingly.
904
905 We cannot mark the nodes at the point the attributes are processed (in
906 handle_*_attribute) because the copy of the declarations available at that
907 point may not be canonical. For example, in:
908
909 void f();
910 void f() __attribute__((used));
911
912 the declaration we see in handle_used_attribute will be the second
913 declaration -- but the front end will subsequently merge that declaration
914 with the original declaration and discard the second declaration.
915
916 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
917
918 void f() {}
919 void f() __attribute__((externally_visible));
920
921 is valid.
922
923 So, we walk the nodes at the end of the translation unit, applying the
924 attributes at that point. */
925
926 static void
927 process_function_and_variable_attributes (struct cgraph_node *first,
928 struct varpool_node *first_var)
929 {
930 struct cgraph_node *node;
931 struct varpool_node *vnode;
932
933 for (node = cgraph_nodes; node != first; node = node->next)
934 {
935 tree decl = node->decl;
936 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
937 {
938 mark_decl_referenced (decl);
939 if (node->local.finalized)
940 cgraph_mark_needed_node (node);
941 }
942 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
943 {
944 if (! TREE_PUBLIC (node->decl))
945 warning (OPT_Wattributes,
946 "%J%<externally_visible%> attribute have effect only on public objects",
947 node->decl);
948 else
949 {
950 if (node->local.finalized)
951 cgraph_mark_needed_node (node);
952 node->local.externally_visible = true;
953 }
954 }
955 }
956 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
957 {
958 tree decl = vnode->decl;
959 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
960 {
961 mark_decl_referenced (decl);
962 if (vnode->finalized)
963 varpool_mark_needed_node (vnode);
964 }
965 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
966 {
967 if (! TREE_PUBLIC (vnode->decl))
968 warning (OPT_Wattributes,
969 "%J%<externally_visible%> attribute have effect only on public objects",
970 vnode->decl);
971 else
972 {
973 if (vnode->finalized)
974 varpool_mark_needed_node (vnode);
975 vnode->externally_visible = true;
976 }
977 }
978 }
979 }
980
981 /* Analyze the whole compilation unit once it is parsed completely. */
982
983 void
984 cgraph_finalize_compilation_unit (void)
985 {
986 struct cgraph_node *node, *next;
987 /* Keep track of already processed nodes when called multiple times for
988 intermodule optimization. */
989 static struct cgraph_node *first_analyzed;
990 struct cgraph_node *first_processed = first_analyzed;
991 static struct varpool_node *first_analyzed_var;
992
993 if (errorcount || sorrycount)
994 return;
995
996 finish_aliases_1 ();
997
998 if (!flag_unit_at_a_time)
999 {
1000 cgraph_output_pending_asms ();
1001 cgraph_assemble_pending_functions ();
1002 varpool_output_debug_info ();
1003 return;
1004 }
1005
1006 if (!quiet_flag)
1007 {
1008 fprintf (stderr, "\nAnalyzing compilation unit\n");
1009 fflush (stderr);
1010 }
1011
1012 timevar_push (TV_CGRAPH);
1013 process_function_and_variable_attributes (first_processed,
1014 first_analyzed_var);
1015 first_processed = cgraph_nodes;
1016 first_analyzed_var = varpool_nodes;
1017 varpool_analyze_pending_decls ();
1018 if (cgraph_dump_file)
1019 {
1020 fprintf (cgraph_dump_file, "Initial entry points:");
1021 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1022 if (node->needed && DECL_SAVED_TREE (node->decl))
1023 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1024 fprintf (cgraph_dump_file, "\n");
1025 }
1026
1027 /* Propagate reachability flag and lower representation of all reachable
1028 functions. In the future, lowering will introduce new functions and
1029 new entry points on the way (by template instantiation and virtual
1030 method table generation for instance). */
1031 while (cgraph_nodes_queue)
1032 {
1033 struct cgraph_edge *edge;
1034 tree decl = cgraph_nodes_queue->decl;
1035
1036 node = cgraph_nodes_queue;
1037 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1038 node->next_needed = NULL;
1039
1040 /* ??? It is possible to create an extern inline function and later use
1041 the weak alias attribute to kill its body. See
1042 gcc.c-torture/compile/20011119-1.c */
1043 if (!DECL_SAVED_TREE (decl))
1044 {
1045 cgraph_reset_node (node);
1046 continue;
1047 }
1048
1049 gcc_assert (!node->analyzed && node->reachable);
1050 gcc_assert (DECL_SAVED_TREE (decl));
1051
1052 cgraph_analyze_function (node);
1053
1054 for (edge = node->callees; edge; edge = edge->next_callee)
1055 if (!edge->callee->reachable)
1056 cgraph_mark_reachable_node (edge->callee);
1057
1058 /* We finalize local static variables while constructing the callgraph
1059 edges. Process their attributes too. */
1060 process_function_and_variable_attributes (first_processed,
1061 first_analyzed_var);
1062 first_processed = cgraph_nodes;
1063 first_analyzed_var = varpool_nodes;
1064 varpool_analyze_pending_decls ();
1065 }
1066
1067 /* Collect entry points to the unit. */
1068 if (cgraph_dump_file)
1069 {
1070 fprintf (cgraph_dump_file, "Unit entry points:");
1071 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1072 if (node->needed && DECL_SAVED_TREE (node->decl))
1073 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1074 fprintf (cgraph_dump_file, "\n\nInitial ");
1075 dump_cgraph (cgraph_dump_file);
1076 }
1077
1078 if (cgraph_dump_file)
1079 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1080
1081 for (node = cgraph_nodes; node != first_analyzed; node = next)
1082 {
1083 tree decl = node->decl;
1084 next = node->next;
1085
1086 if (node->local.finalized && !DECL_SAVED_TREE (decl))
1087 cgraph_reset_node (node);
1088
1089 if (!node->reachable && DECL_SAVED_TREE (decl))
1090 {
1091 if (cgraph_dump_file)
1092 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1093 cgraph_remove_node (node);
1094 continue;
1095 }
1096 else
1097 node->next_needed = NULL;
1098 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1099 gcc_assert (node->analyzed == node->local.finalized);
1100 }
1101 if (cgraph_dump_file)
1102 {
1103 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1104 dump_cgraph (cgraph_dump_file);
1105 }
1106 first_analyzed = cgraph_nodes;
1107 ggc_collect ();
1108 timevar_pop (TV_CGRAPH);
1109 }
1110 /* Figure out what functions we want to assemble. */
1111
1112 static void
1113 cgraph_mark_functions_to_output (void)
1114 {
1115 struct cgraph_node *node;
1116
1117 for (node = cgraph_nodes; node; node = node->next)
1118 {
1119 tree decl = node->decl;
1120 struct cgraph_edge *e;
1121
1122 gcc_assert (!node->output);
1123
1124 for (e = node->callers; e; e = e->next_caller)
1125 if (e->inline_failed)
1126 break;
1127
1128 /* We need to output all local functions that are used and not
1129 always inlined, as well as those that are reachable from
1130 outside the current compilation unit. */
1131 if (DECL_SAVED_TREE (decl)
1132 && !node->global.inlined_to
1133 && (node->needed
1134 || (e && node->reachable))
1135 && !TREE_ASM_WRITTEN (decl)
1136 && !DECL_EXTERNAL (decl))
1137 node->output = 1;
1138 else
1139 {
1140 /* We should've reclaimed all functions that are not needed. */
1141 #ifdef ENABLE_CHECKING
1142 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1143 && !DECL_EXTERNAL (decl))
1144 {
1145 dump_cgraph_node (stderr, node);
1146 internal_error ("failed to reclaim unneeded function");
1147 }
1148 #endif
1149 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1150 || DECL_EXTERNAL (decl));
1151
1152 }
1153
1154 }
1155 }
1156
1157 /* Expand function specified by NODE. */
1158
1159 static void
1160 cgraph_expand_function (struct cgraph_node *node)
1161 {
1162 tree decl = node->decl;
1163
1164 /* We ought to not compile any inline clones. */
1165 gcc_assert (!node->global.inlined_to);
1166
1167 if (flag_unit_at_a_time)
1168 announce_function (decl);
1169
1170 cgraph_lower_function (node);
1171
1172 /* Generate RTL for the body of DECL. */
1173 lang_hooks.callgraph.expand_function (decl);
1174
1175 /* Make sure that BE didn't give up on compiling. */
1176 /* ??? Can happen with nested function of extern inline. */
1177 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1178
1179 current_function_decl = NULL;
1180 if (!cgraph_preserve_function_body_p (node->decl))
1181 {
1182 DECL_SAVED_TREE (node->decl) = NULL;
1183 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1184 DECL_INITIAL (node->decl) = error_mark_node;
1185 /* Eliminate all call edges. This is important so the call_expr no longer
1186 points to the dead function body. */
1187 cgraph_node_remove_callees (node);
1188 }
1189
1190 cgraph_function_flags_ready = true;
1191 }
1192
1193 /* Return true when the call on edge E may be inlined; *REASON is set to E's inline_failed message. */
1194
1195 bool
1196 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1197 {
1198 *reason = e->inline_failed;
1199 return !e->inline_failed;
1200 }
1201
1202
1203
1204 /* Expand all functions that must be output.
1205
1206 Attempt to topologically sort the nodes so a function is output when
1207 all the functions it calls are already assembled, to allow data to be
1208 propagated across the callgraph. Use a stack to get smaller distance
1209 between a function and its callees (later we may choose to use a more
1210 sophisticated algorithm for function reordering; we will likely want
1211 to use subsections to make the output functions appear in top-down
1212 order). */
1213
1214 static void
1215 cgraph_expand_all_functions (void)
1216 {
1217 struct cgraph_node *node;
1218 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1219 int order_pos = 0, new_order_pos = 0;
1220 int i;
1221
1222 order_pos = cgraph_postorder (order);
1223 gcc_assert (order_pos == cgraph_n_nodes);
1224
1225 /* The garbage collector may remove inline clones we eliminate during
1226 optimization, so we must be sure not to reference them. */
1227 for (i = 0; i < order_pos; i++)
1228 if (order[i]->output)
1229 order[new_order_pos++] = order[i];
1230
1231 for (i = new_order_pos - 1; i >= 0; i--)
1232 {
1233 node = order[i];
1234 if (node->output)
1235 {
1236 gcc_assert (node->reachable);
1237 node->output = 0;
1238 cgraph_expand_function (node);
1239 }
1240 }
1241 cgraph_process_new_functions ();
1242
1243 free (order);
1244
1245 }
1246
1247 /* This is used to sort the node types by the cgraph order number. */
1248
1249 struct cgraph_order_sort
1250 {
1251 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1252 union
1253 {
1254 struct cgraph_node *f;
1255 struct varpool_node *v;
1256 struct cgraph_asm_node *a;
1257 } u;
1258 };
1259
1260 /* Output all functions, variables, and asm statements in the order
1261 given by their order fields, which is the order in which they
1262 appeared in the file. This implements -fno-toplevel-reorder. In
1263 this mode we may output functions and variables which don't really
1264 need to be output. */
1265
1266 static void
1267 cgraph_output_in_order (void)
1268 {
1269 int max;
1270 size_t size;
1271 struct cgraph_order_sort *nodes;
1272 int i;
1273 struct cgraph_node *pf;
1274 struct varpool_node *pv;
1275 struct cgraph_asm_node *pa;
1276
1277 max = cgraph_order;
1278 size = max * sizeof (struct cgraph_order_sort);
1279 nodes = (struct cgraph_order_sort *) alloca (size);
1280 memset (nodes, 0, size);
1281
1282 varpool_analyze_pending_decls ();
1283
1284 for (pf = cgraph_nodes; pf; pf = pf->next)
1285 {
1286 if (pf->output)
1287 {
1288 i = pf->order;
1289 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1290 nodes[i].kind = ORDER_FUNCTION;
1291 nodes[i].u.f = pf;
1292 }
1293 }
1294
1295 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1296 {
1297 i = pv->order;
1298 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1299 nodes[i].kind = ORDER_VAR;
1300 nodes[i].u.v = pv;
1301 }
1302
1303 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1304 {
1305 i = pa->order;
1306 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1307 nodes[i].kind = ORDER_ASM;
1308 nodes[i].u.a = pa;
1309 }
1310
1311 for (i = 0; i < max; ++i)
1312 {
1313 switch (nodes[i].kind)
1314 {
1315 case ORDER_FUNCTION:
1316 nodes[i].u.f->output = 0;
1317 cgraph_expand_function (nodes[i].u.f);
1318 break;
1319
1320 case ORDER_VAR:
1321 varpool_assemble_decl (nodes[i].u.v);
1322 break;
1323
1324 case ORDER_ASM:
1325 assemble_asm (nodes[i].u.a->asm_str);
1326 break;
1327
1328 case ORDER_UNDEFINED:
1329 break;
1330
1331 default:
1332 gcc_unreachable ();
1333 }
1334 }
1335
1336 cgraph_asm_nodes = NULL;
1337 }
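
/* Illustration (hypothetical input): with -fno-toplevel-reorder,

	int counter = 1;
	asm ("# boundary");
	int next (void) { return ++counter; }

   is emitted as the variable, then the toplevel asm, then the function,
   exactly as they appeared, driven by the order fields collected above.  */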
1338
1339 /* Mark visibility of all functions.
1340
1341 A local function is one whose calls can occur only in the current
1342 compilation unit and all its calls are explicit, so we can change
1343 its calling convention. We simply mark all static functions whose
1344 address is not taken as local.
1345
1346 We also change the TREE_PUBLIC flag of all declarations that are public
1347 from the language point of view but whose visibility we want to
1348 override for the back end's point of view. */
1349
1350 static void
1351 cgraph_function_and_variable_visibility (void)
1352 {
1353 struct cgraph_node *node;
1354 struct varpool_node *vnode;
1355
1356 for (node = cgraph_nodes; node; node = node->next)
1357 {
1358 if (node->reachable
1359 && (DECL_COMDAT (node->decl)
1360 || (!flag_whole_program
1361 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1362 node->local.externally_visible = true;
1363 if (!node->local.externally_visible && node->analyzed
1364 && !DECL_EXTERNAL (node->decl))
1365 {
1366 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1367 TREE_PUBLIC (node->decl) = 0;
1368 }
1369 node->local.local = (!node->needed
1370 && node->analyzed
1371 && !DECL_EXTERNAL (node->decl)
1372 && !node->local.externally_visible);
1373 }
1374 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1375 {
1376 if (vnode->needed
1377 && !flag_whole_program
1378 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1379 vnode->externally_visible = 1;
1380 if (!vnode->externally_visible)
1381 {
1382 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1383 TREE_PUBLIC (vnode->decl) = 0;
1384 }
1385 gcc_assert (TREE_STATIC (vnode->decl));
1386 }
1387
1388 /* Because we have to be conservative on the boundaries of source
1389 level units, it is possible that we marked some functions as
1390 reachable just because they might be used later via external
1391 linkage, but after making them local they are really unreachable
1392 now. */
1393 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1394
1395 if (cgraph_dump_file)
1396 {
1397 fprintf (cgraph_dump_file, "\nMarking local functions:");
1398 for (node = cgraph_nodes; node; node = node->next)
1399 if (node->local.local)
1400 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1401 fprintf (cgraph_dump_file, "\n\n");
1402 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1403 for (node = cgraph_nodes; node; node = node->next)
1404 if (node->local.externally_visible)
1405 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1406 fprintf (cgraph_dump_file, "\n\n");
1407 }
1408 cgraph_function_flags_ready = true;
1409 }
1410
1411 /* Return true when the function body of DECL still needs to be kept around
1412 for later re-use. */
1413 bool
1414 cgraph_preserve_function_body_p (tree decl)
1415 {
1416 struct cgraph_node *node;
1417 if (!cgraph_global_info_ready)
1418 return (flag_really_no_inline
1419 ? lang_hooks.tree_inlining.disregard_inline_limits (decl)
1420 : DECL_INLINE (decl));
1421 /* Look if there is any clone around. */
1422 for (node = cgraph_node (decl); node; node = node->next_clone)
1423 if (node->global.inlined_to)
1424 return true;
1425 return false;
1426 }
1427
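/* Run all of the IPA passes (all_ipa_passes) over the call graph.  */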
1428 static void
1429 ipa_passes (void)
1430 {
1431 cfun = NULL;
1432 current_function_decl = NULL;
1433 tree_register_cfg_hooks ();
1434 bitmap_obstack_initialize (NULL);
1435 execute_ipa_pass_list (all_ipa_passes);
1436 bitmap_obstack_release (NULL);
1437 }
1438
1439 /* Perform simple optimizations based on callgraph. */
1440
1441 void
1442 cgraph_optimize (void)
1443 {
1444 if (errorcount || sorrycount)
1445 return;
1446
1447 #ifdef ENABLE_CHECKING
1448 verify_cgraph ();
1449 #endif
1450 if (!flag_unit_at_a_time)
1451 {
1452 cgraph_assemble_pending_functions ();
1453 cgraph_process_new_functions ();
1454 cgraph_state = CGRAPH_STATE_FINISHED;
1455 cgraph_output_pending_asms ();
1456 varpool_assemble_pending_decls ();
1457 varpool_output_debug_info ();
1458 return;
1459 }
1460
1461 /* The front end may output common variables after the unit has been finalized.
1462 It is safe to deal with them here as they are always zero-initialized. */
1463 varpool_analyze_pending_decls ();
1464 cgraph_process_new_functions ();
1465
1466 timevar_push (TV_CGRAPHOPT);
1467 if (pre_ipa_mem_report)
1468 {
1469 fprintf (stderr, "Memory consumption before IPA\n");
1470 dump_memory_report (false);
1471 }
1472 if (!quiet_flag)
1473 fprintf (stderr, "Performing interprocedural optimizations\n");
1474
1475 cgraph_function_and_variable_visibility ();
1476 if (cgraph_dump_file)
1477 {
1478 fprintf (cgraph_dump_file, "Marked ");
1479 dump_cgraph (cgraph_dump_file);
1480 }
1481 cgraph_state = CGRAPH_STATE_IPA;
1482
1483 /* Don't run the IPA passes if there were any error or sorry messages. */
1484 if (errorcount == 0 && sorrycount == 0)
1485 ipa_passes ();
1486
1487 /* This pass removes the bodies of extern inline functions we never inlined.
1488 Do this late so other IPA passes see what is really going on. */
1489 cgraph_remove_unreachable_nodes (false, dump_file);
1490 cgraph_increase_alignment ();
1491 cgraph_global_info_ready = true;
1492 if (cgraph_dump_file)
1493 {
1494 fprintf (cgraph_dump_file, "Optimized ");
1495 dump_cgraph (cgraph_dump_file);
1496 dump_varpool (cgraph_dump_file);
1497 }
1498 if (post_ipa_mem_report)
1499 {
1500 fprintf (stderr, "Memory consumption after IPA\n");
1501 dump_memory_report (false);
1502 }
1503 timevar_pop (TV_CGRAPHOPT);
1504
1505 /* Output everything. */
1506 if (!quiet_flag)
1507 fprintf (stderr, "Assembling functions:\n");
1508 #ifdef ENABLE_CHECKING
1509 verify_cgraph ();
1510 #endif
1511
1512 cgraph_mark_functions_to_output ();
1513
1514 cgraph_state = CGRAPH_STATE_EXPANSION;
1515 if (!flag_toplevel_reorder)
1516 cgraph_output_in_order ();
1517 else
1518 {
1519 cgraph_output_pending_asms ();
1520
1521 cgraph_expand_all_functions ();
1522 varpool_remove_unreferenced_decls ();
1523
1524 varpool_assemble_pending_decls ();
1525 varpool_output_debug_info ();
1526 }
1527 cgraph_process_new_functions ();
1528 cgraph_state = CGRAPH_STATE_FINISHED;
1529
1530 if (cgraph_dump_file)
1531 {
1532 fprintf (cgraph_dump_file, "\nFinal ");
1533 dump_cgraph (cgraph_dump_file);
1534 }
1535 #ifdef ENABLE_CHECKING
1536 verify_cgraph ();
1537 /* Double check that all inline clones are gone and that all
1538 function bodies have been released from memory. */
1539 if (flag_unit_at_a_time
1540 && !(sorrycount || errorcount))
1541 {
1542 struct cgraph_node *node;
1543 bool error_found = false;
1544
1545 for (node = cgraph_nodes; node; node = node->next)
1546 if (node->analyzed
1547 && (node->global.inlined_to
1548 || DECL_SAVED_TREE (node->decl)))
1549 {
1550 error_found = true;
1551 dump_cgraph_node (stderr, node);
1552 }
1553 if (error_found)
1554 internal_error ("nodes with no released memory found");
1555 }
1556 #endif
1557 }
1558
1559 /* Increase alignment of global arrays to improve vectorization potential.
1560 TODO:
1561 - Consider also structs that have an array field.
1562 - Use ipa analysis to prune arrays that can't be vectorized?
1563 This should involve global alignment analysis and in the future also
1564 array padding. */
1565
1566 static void
1567 cgraph_increase_alignment (void)
1568 {
1569 if (flag_section_anchors && flag_tree_vectorize)
1570 {
1571 struct varpool_node *vnode;
1572
1573 /* Increase the alignment of all global arrays for vectorization. */
1574 for (vnode = varpool_nodes_queue;
1575 vnode;
1576 vnode = vnode->next_needed)
1577 {
1578 tree vectype, decl = vnode->decl;
1579 unsigned int alignment;
1580
1581 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
1582 continue;
1583 vectype = get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl)));
1584 if (!vectype)
1585 continue;
1586 alignment = TYPE_ALIGN (vectype);
1587 if (DECL_ALIGN (decl) >= alignment)
1588 continue;
1589
1590 if (vect_can_force_dr_alignment_p (decl, alignment))
1591 {
1592 DECL_ALIGN (decl) = TYPE_ALIGN (vectype);
1593 DECL_USER_ALIGN (decl) = 1;
1594 if (cgraph_dump_file)
1595 {
1596 fprintf (cgraph_dump_file, "Increasing alignment of decl: ");
1597 print_generic_expr (cgraph_dump_file, decl, TDF_SLIM);
1598 }
1599 }
1600 }
1601 }
1602 }
1603
1604 /* Generate and emit a static constructor or destructor. WHICH must be
1605 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing GENERIC
1606 statements. PRIORITY is the constructor or destructor priority. */
1607
1608 void
1609 cgraph_build_static_cdtor (char which, tree body, int priority)
1610 {
1611 static int counter = 0;
1612 char which_buf[16];
1613 tree decl, name, resdecl;
1614
1615 sprintf (which_buf, "%c_%d", which, counter++);
1616 name = get_file_function_name (which_buf);
1617
1618 decl = build_decl (FUNCTION_DECL, name,
1619 build_function_type (void_type_node, void_list_node));
1620 current_function_decl = decl;
1621
1622 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1623 DECL_ARTIFICIAL (resdecl) = 1;
1624 DECL_IGNORED_P (resdecl) = 1;
1625 DECL_RESULT (decl) = resdecl;
1626
1627 allocate_struct_function (decl);
1628
1629 TREE_STATIC (decl) = 1;
1630 TREE_USED (decl) = 1;
1631 DECL_ARTIFICIAL (decl) = 1;
1632 DECL_IGNORED_P (decl) = 1;
1633 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1634 DECL_SAVED_TREE (decl) = body;
1635 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1636 DECL_UNINLINABLE (decl) = 1;
1637
1638 DECL_INITIAL (decl) = make_node (BLOCK);
1639 TREE_USED (DECL_INITIAL (decl)) = 1;
1640
1641 DECL_SOURCE_LOCATION (decl) = input_location;
1642 cfun->function_end_locus = input_location;
1643
1644 switch (which)
1645 {
1646 case 'I':
1647 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1648 break;
1649 case 'D':
1650 DECL_STATIC_DESTRUCTOR (decl) = 1;
1651 break;
1652 default:
1653 gcc_unreachable ();
1654 }
1655
1656 gimplify_function_tree (decl);
1657
1658 cgraph_add_new_function (decl, false);
1659 cgraph_mark_needed_node (cgraph_node (decl));
1660
1661 if (targetm.have_ctors_dtors)
1662 {
1663 void (*fn) (rtx, int);
1664
1665 if (which == 'I')
1666 fn = targetm.asm_out.constructor;
1667 else
1668 fn = targetm.asm_out.destructor;
1669 fn (XEXP (DECL_RTL (decl), 0), priority);
1670 }
1671 }
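
/* A usage sketch (hypothetical caller; REG_FNDECL is an assumed
   FUNCTION_DECL for some registration routine): build a GENERIC body and
   emit it as a static constructor with the default priority:

	tree body = alloc_stmt_list ();
	append_to_statement_list (build_function_call_expr (reg_fndecl,
							    NULL_TREE),
				  &body);
	cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
*/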
1672
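/* Initialize the callgraph dump file.  */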
1673 void
1674 init_cgraph (void)
1675 {
1676 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1677 }
1678
1679 /* The edges representing the callers of the NEW_VERSION node were
1680 fixed by cgraph_function_versioning (); now the call_expr in their
1681 respective call statements should be updated to call NEW_VERSION. */
1682
1683 static void
1684 update_call_expr (struct cgraph_node *new_version)
1685 {
1686 struct cgraph_edge *e;
1687
1688 gcc_assert (new_version);
1689 for (e = new_version->callers; e; e = e->next_caller)
1690 /* Update the call expr on the edges
1691 to call the new version. */
1692 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1693 }
1694
1695
1696 /* Create a new cgraph node which is the new version of
1697 the OLD_VERSION node. REDIRECT_CALLERS holds the caller
1698 edges which should be redirected to point to
1699 NEW_VERSION. All the callee edges of OLD_VERSION
1700 are cloned to the new version node. Return the new
1701 version node. */
1702
1703 static struct cgraph_node *
1704 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1705 tree new_decl,
1706 VEC(cgraph_edge_p,heap) *redirect_callers)
1707 {
1708 struct cgraph_node *new_version;
1709 struct cgraph_edge *e, *new_e;
1710 struct cgraph_edge *next_callee;
1711 unsigned i;
1712
1713 gcc_assert (old_version);
1714
1715 new_version = cgraph_node (new_decl);
1716
1717 new_version->analyzed = true;
1718 new_version->local = old_version->local;
1719 new_version->global = old_version->global;
1720 new_version->rtl = new_version->rtl;
1721 new_version->reachable = true;
1722 new_version->count = old_version->count;
1723
1724 /* Clone the old node callees. Recursive calls are
1725 also cloned. */
1726 for (e = old_version->callees;e; e=e->next_callee)
1727 {
1728 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1729 new_e->count = e->count;
1730 }
1731 /* Fix recursive calls.
1732 If OLD_VERSION has a recursive call, after the
1733 previous edge cloning the new version will have an edge
1734 pointing to the old version, which is wrong.
1735 Redirect it to point to the new version. */
1736 for (e = new_version->callees ; e; e = next_callee)
1737 {
1738 next_callee = e->next_callee;
1739 if (e->callee == old_version)
1740 cgraph_redirect_edge_callee (e, new_version);
1741
1742 if (!next_callee)
1743 break;
1744 }
1745 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1746 {
1747 /* Redirect calls to the old version node to point to its new
1748 version. */
1749 cgraph_redirect_edge_callee (e, new_version);
1750 }
1751
1752 return new_version;
1753 }
1754
1755 /* Perform function versioning.
1756 Function versioning includes copying of the tree and
1757 a callgraph update (creating a new cgraph node and updating
1758 its callees and callers).
1759
1760 REDIRECT_CALLERS varray includes the edges to be redirected
1761 to the new version.
1762
1763 TREE_MAP is a mapping of tree nodes we want to replace with
1764 new ones (according to results of prior analysis).
1765 OLD_VERSION_NODE is the node that is versioned.
1766 It returns the new version's cgraph node. */
1767
1768 struct cgraph_node *
1769 cgraph_function_versioning (struct cgraph_node *old_version_node,
1770 VEC(cgraph_edge_p,heap) *redirect_callers,
1771 varray_type tree_map)
1772 {
1773 tree old_decl = old_version_node->decl;
1774 struct cgraph_node *new_version_node = NULL;
1775 tree new_decl;
1776
1777 if (!tree_versionable_function_p (old_decl))
1778 return NULL;
1779
1780 /* Make a new FUNCTION_DECL tree node for the
1781 new version. */
1782 new_decl = copy_node (old_decl);
1783
1784 /* Create the new version's call-graph node.
1785 and update the edges of the new node. */
1786 new_version_node =
1787 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1788 redirect_callers);
1789
1790 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1791 tree_function_versioning (old_decl, new_decl, tree_map, false);
1792 /* Update the call_expr on the edges to call the new version node. */
1793 update_call_expr (new_version_node);
1794
1795 /* Update the new version's properties.
1796 Make the new version visible only within this translation unit.
1797 ??? We cannot use COMDAT linkage because there is no
1798 ABI support for this. */
1799 DECL_EXTERNAL (new_version_node->decl) = 0;
1800 DECL_ONE_ONLY (new_version_node->decl) = 0;
1801 TREE_PUBLIC (new_version_node->decl) = 0;
1802 DECL_COMDAT (new_version_node->decl) = 0;
1803 new_version_node->local.externally_visible = 0;
1804 new_version_node->local.local = 1;
1805 new_version_node->lowered = true;
1806 return new_version_node;
1807 }
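
/* A usage sketch (hypothetical caller, e.g. an IPA pass producing a
   specialized copy of NODE; SOME_EDGE is an assumed caller edge that
   should use the copy):

	VEC(cgraph_edge_p,heap) *redirect = VEC_alloc (cgraph_edge_p, heap, 1);
	VEC_safe_push (cgraph_edge_p, heap, redirect, some_edge);
	new_node = cgraph_function_versioning (node, redirect, NULL);

   Passing NULL for TREE_MAP requests a plain copy with no tree
   replacements.  */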
1808
1809 /* Produce a separate function body for inline clones so the offline copy can
1810 be modified without affecting them. */
1811 struct cgraph_node *
1812 save_inline_function_body (struct cgraph_node *node)
1813 {
1814 struct cgraph_node *first_clone;
1815
1816 gcc_assert (node == cgraph_node (node->decl));
1817
1818 cgraph_lower_function (node);
1819
1820 /* In non-unit-at-a-time mode we construct a full-fledged clone we never
1821 output to the assembly file. This clone is pointed to by inline_decl of the
1822 original function and the inlining infrastructure knows how to deal with it. */
1823 if (!flag_unit_at_a_time)
1824 {
1825 struct cgraph_edge *e;
1826
1827 first_clone = cgraph_clone_node (node, node->count, 0, false);
1828 first_clone->needed = 0;
1829 first_clone->reachable = 1;
1830 /* Recursively clone all bodies. */
1831 for (e = first_clone->callees; e; e = e->next_callee)
1832 if (!e->inline_failed)
1833 cgraph_clone_inlined_nodes (e, true, false);
1834 }
1835 else
1836 first_clone = node->next_clone;
1837
1838 first_clone->decl = copy_node (node->decl);
1839 node->next_clone = NULL;
1840 if (!flag_unit_at_a_time)
1841 node->inline_decl = first_clone->decl;
1842 first_clone->prev_clone = NULL;
1843 cgraph_insert_node_to_hashtable (first_clone);
1844 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1845
1846 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1847 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1848
1849 DECL_EXTERNAL (first_clone->decl) = 0;
1850 DECL_ONE_ONLY (first_clone->decl) = 0;
1851 TREE_PUBLIC (first_clone->decl) = 0;
1852 DECL_COMDAT (first_clone->decl) = 0;
1853
1854 for (node = first_clone->next_clone; node; node = node->next_clone)
1855 node->decl = first_clone->decl;
1856 #ifdef ENABLE_CHECKING
1857 verify_cgraph_node (first_clone);
1858 #endif
1859 return first_clone;
1860 }