re PR middle-end/29299 (gcc "used" attribute has no effect on local-scope static...
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /* This module implements the main driver of the compilation process as well as
23 a few basic interprocedural optimizers.
24
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the backend (and middle end).
27
28 The front end is supposed to use the following functionality (a usage sketch follows this comment block):
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole body of a
33 function and it is certain that neither its body nor its declaration will change.
34
35 (There is one exception, needed for implementing GCC extern inline functions.)
36
37 - cgraph_varpool_finalize_decl
38
39 This function has the same behavior as the above but is used for static
40 variables.
41
42 - cgraph_finalize_compilation_unit
43
44 This function is called once the compilation unit is finalized and it will
45 no longer change.
46
47 In unit-at-a-time mode the call-graph construction and local function
48 analysis take place here. Bodies of unreachable functions are released
49 to conserve memory usage.
50
51 ??? The compilation unit in this point of view should be the compilation
52 unit as defined by the language - for instance the C front end allows multiple
53 compilation units to be parsed at once and should call this function each
54 time parsing is done, so that we save memory.
55
56 - cgraph_optimize
57
58 In unit-at-a-time compilation the intraprocedural analysis takes
59 place here. In particular, static functions whose address is never
60 taken are marked as local. The backend can then use this information to
61 modify calling conventions, do better inlining or similar optimizations.
62
63 - cgraph_assemble_pending_functions
64 - cgraph_varpool_assemble_pending_decls
65
66 In non-unit-at-a-time mode these functions can be used to force compilation
67 of functions or variables that are known to be needed at a given stage
68 of compilation.
69
70 - cgraph_mark_needed_node
71 - cgraph_varpool_mark_needed_node
72
73 When a function or variable is referenced in some hidden way (for instance
74 via assembly code and marked by the attribute "used"), the call-graph data
75 structure must be updated accordingly by these functions.
76
77 - analyze_expr callback
78
79 This function is responsible for lowering tree nodes not understood by
80 generic code into understandable ones, or alternatively for marking the
81 callgraph and varpool nodes referenced by them as needed.
82
83 ??? On tree-ssa, genericizing should take place here and we would avoid the
84 need for these hooks (replacing them by a genericizing hook).
85
86 - expand_function callback
87
88 This function is used to expand a function and pass it to the RTL back end.
89 The front end should not make any assumptions about when this function can be
90 called. In particular cgraph_assemble_pending_functions,
91 cgraph_varpool_assemble_pending_decls, cgraph_finalize_function,
92 cgraph_varpool_finalize_decl and cgraph_optimize can cause arbitrary
93 previously finalized functions to be expanded.
94
95 We implement two compilation modes.
96
97 - unit-at-a-time: In this mode the analysis of all functions is deferred
98 to cgraph_finalize_compilation_unit and their expansion to cgraph_optimize.
99
100 In cgraph_finalize_compilation_unit the reachable functions are
101 analyzed. During analysis the call-graph edges from reachable
102 functions are constructed and their destinations are marked as
103 reachable. References to functions and variables are discovered too,
104 and variables found to be needed are output to the assembly file. Via
105 the mark_referenced call in assemble_variable, functions referenced by
106 static variables are noticed too.
107
108 The intra-procedural information is produced and its existence is
109 indicated by global_info_ready. Once this flag is set it is impossible
110 to change a function from !reachable to reachable and thus
111 assemble_variable no longer calls mark_referenced.
112
113 Finally the call-graph is topologically sorted and all reachable functions
114 that have not been completely inlined or are not external are output.
115
116 ??? It is possible that a reference to a function or variable is optimized
117 out. We cannot deal with this nicely because the topological order is not
118 suitable for it. For tree-ssa we may consider another pass doing
119 optimization and re-discovering reachable functions.
120
121 ??? Reorganize the code so variables are output very last and only if they
122 really have been referenced by the produced code, so we catch more cases
123 where the reference has been optimized out.
124
125 - non-unit-at-a-time
126
127 All functions and variables are output as early as possible to conserve
128 memory consumption. This may or may not result in less memory used, but
129 it is still needed for some legacy code that relies on a particular ordering
130 of things output from the compiler.
131
132 Varpool data structures are not used and variables are output directly.
133
134 Functions are output early using a call to
135 cgraph_assemble_pending_functions from cgraph_finalize_function. The
136 decision on whether a function is needed is made more conservatively, so
137 uninlinable static functions are needed too. During the call-graph
138 construction the edge destinations are not marked as reachable and we
139 rely completely on assemble_variable to mark them. */
140
141
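/* For illustration only - a minimal usage sketch, not code from GCC itself.
   It shows the calling sequence a front end is expected to follow with the
   interface described above; parse_next_toplevel_declaration is a
   hypothetical placeholder for front-end work.

     void
     frontend_compile_unit (void)
     {
       tree fndecl, vardecl;

       while (parse_next_toplevel_declaration (&fndecl, &vardecl))
         {
           if (fndecl)
             cgraph_finalize_function (fndecl, false);   // body is final now
           else if (vardecl)
             cgraph_varpool_finalize_decl (vardecl);     // same for static variables
         }

       cgraph_finalize_compilation_unit ();   // whole unit parsed
       cgraph_optimize ();                    // IPA analysis and output
     }  */
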
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_output_pending_asms (void);
175 static void cgraph_increase_alignment (void);
176
177 /* Lists all assembled variables to be sent to debugger output later on. */
178 static GTY(()) struct cgraph_varpool_node *cgraph_varpool_assembled_nodes_queue;
179
180 /* Records tree nodes seen in record_reference. Simply using
181 walk_tree_without_duplicates doesn't guarantee each node is visited
182 once because it gets a new htab upon each recursive call from
183 record_reference itself. */
184 static struct pointer_set_t *visited_nodes;
185
186 static FILE *cgraph_dump_file;
187
188 /* Determine if function DECL is needed. That is, visible to something
189 either outside this translation unit, something magic in the system
190 configury, or (if not doing unit-at-a-time) to something we haven't
191 seen yet. */
192
193 static bool
194 decide_is_function_needed (struct cgraph_node *node, tree decl)
195 {
196 tree origin;
197 if (MAIN_NAME_P (DECL_NAME (decl))
198 && TREE_PUBLIC (decl))
199 {
200 node->local.externally_visible = true;
201 return true;
202 }
203
204 /* If the user told us it is used, then it must be so. */
205 if (node->local.externally_visible)
206 return true;
207
208 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
209 return true;
210
211 /* ??? If the assembler name is set by hand, it is possible to assemble
212 the name later after finalizing the function and the fact is noticed
213 in assemble_name then. This is arguably a bug. */
214 if (DECL_ASSEMBLER_NAME_SET_P (decl)
215 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
216 return true;
217
218 /* If we decided it was needed before, but at the time we didn't have
219 the body of the function available, then it's still needed. We have
220 to go back and re-check its dependencies now. */
221 if (node->needed)
222 return true;
223
224 /* Externally visible functions must be output. The exception is
225 COMDAT functions that must be output only when they are needed.
226
227 When not optimizing, also output the static functions (see
228 PR24561), but don't do so for always_inline functions, functions
229 declared inline and nested functions. These were optimized out
230 in the original implementation and it is unclear whether we want
231 to change the behavior here. */
232 if (((TREE_PUBLIC (decl)
233 || (!optimize && !node->local.disregard_inline_limits
234 && !DECL_DECLARED_INLINE_P (decl)
235 && !node->origin))
236 && !flag_whole_program)
237 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
238 return true;
239
240 /* Constructors and destructors are reachable from the runtime by
241 some mechanism. */
242 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
243 return true;
244
245 if (flag_unit_at_a_time)
246 return false;
247
248 /* If not doing unit-at-a-time, then we'll only defer this function
249 if it's marked for inlining. Otherwise we want to emit it now. */
250
251 /* "extern inline" functions are never output locally. */
252 if (DECL_EXTERNAL (decl))
253 return false;
254 /* Nested functions of an extern inline function shall not be emitted
255 unless we inlined the origin. */
256 for (origin = decl_function_context (decl); origin;
257 origin = decl_function_context (origin))
258 if (DECL_EXTERNAL (origin))
259 return false;
260 /* We want to emit COMDAT functions only when absolutely necessary. */
261 if (DECL_COMDAT (decl))
262 return false;
263 if (!DECL_INLINE (decl)
264 || (!node->local.disregard_inline_limits
265 /* When declared inline, defer even the uninlinable functions.
266 This allows them to be eliminated when unused. */
267 && !DECL_DECLARED_INLINE_P (decl)
268 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
269 return true;
270
271 return false;
272 }
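
/* For illustration only - examples, not code from GCC itself, of the kinds of
   user-level declarations that decide_is_function_needed above treats as
   entry points even when nothing in the unit calls them:

     int main (void) { return 0; }                   // MAIN_NAME_P and TREE_PUBLIC

     static void keep (void) __attribute__ ((used)); // "used" attribute
     static void keep (void) {}

     static void setup (void) __attribute__ ((constructor));
     static void setup (void) {}                     // DECL_STATIC_CONSTRUCTOR

     void api_entry (void) {}                        // TREE_PUBLIC, not COMDAT/external  */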
273
274 /* Walk the decls we marked as necessary and see if they reference new
275 variables or functions and add them into the worklists. */
276 static bool
277 cgraph_varpool_analyze_pending_decls (void)
278 {
279 bool changed = false;
280 timevar_push (TV_CGRAPH);
281
282 while (cgraph_varpool_first_unanalyzed_node)
283 {
284 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
285
286 cgraph_varpool_first_unanalyzed_node->analyzed = true;
287
288 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
289
290 /* Compute the alignment early so function body expanders are
291 already informed about increased alignment. */
292 align_variable (decl, 0);
293
294 if (DECL_INITIAL (decl))
295 {
296 visited_nodes = pointer_set_create ();
297 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
298 pointer_set_destroy (visited_nodes);
299 visited_nodes = NULL;
300 }
301 changed = true;
302 }
303 timevar_pop (TV_CGRAPH);
304 return changed;
305 }
306
307 /* Optimization of function bodies might've rendered some variables
308 unnecessary, so we want to avoid compiling them.
309
310 This is done by pruning the queue and keeping only the variables that
311 really appear needed (i.e. they are either externally visible or referenced
312 by a compiled function). Re-doing the reachability analysis on variables
313 brings back the remaining variables referenced by these. */
314 static void
315 cgraph_varpool_remove_unreferenced_decls (void)
316 {
317 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
318
319 cgraph_varpool_reset_queue ();
320
321 if (errorcount || sorrycount)
322 return;
323
324 while (node)
325 {
326 tree decl = node->decl;
327 next = node->next_needed;
328 node->needed = 0;
329
330 if (node->finalized
331 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
332 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
333 || node->force_output
334 || decide_is_variable_needed (node, decl)
335 /* ??? Cgraph does not yet rule the world with an iron hand,
336 and does not control the emission of debug information.
337 After a variable has its DECL_RTL set, we must assume that
338 it may be referenced by the debug information, and we can
339 no longer elide it. */
340 || DECL_RTL_SET_P (decl)))
341 cgraph_varpool_mark_needed_node (node);
342
343 node = next;
344 }
345 /* Make sure we mark alias targets as used targets. */
346 finish_aliases_1 ();
347 cgraph_varpool_analyze_pending_decls ();
348 }
349
350
351 /* When not doing unit-at-a-time, output all functions enqueued.
352 Return true when such functions were found. */
353
354 bool
355 cgraph_assemble_pending_functions (void)
356 {
357 bool output = false;
358
359 if (flag_unit_at_a_time)
360 return false;
361
362 cgraph_output_pending_asms ();
363
364 while (cgraph_nodes_queue)
365 {
366 struct cgraph_node *n = cgraph_nodes_queue;
367
368 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
369 n->next_needed = NULL;
370 if (!n->global.inlined_to
371 && !n->alias
372 && !DECL_EXTERNAL (n->decl))
373 {
374 cgraph_expand_function (n);
375 output = true;
376 }
377 }
378
379 /* Process CGRAPH_EXPAND_QUEUE, which holds functions created during
380 the expansion process. Note that this queue may grow as it is
381 being processed, as the new functions may generate new ones. */
382 while (cgraph_expand_queue)
383 {
384 struct cgraph_node *n = cgraph_expand_queue;
385 cgraph_expand_queue = cgraph_expand_queue->next_needed;
386 n->next_needed = NULL;
387 cgraph_finalize_function (n->decl, false);
388 output = true;
389 }
390
391 return output;
392 }
393
394
395 /* As a GCC extension we allow redefinition of the function. The
396 semantics when the two bodies differ are not well defined.
397 We replace the old body with the new body, so in unit-at-a-time mode
398 we always use the new body, while in normal mode we may end up with the
399 old body inlined into some functions and the new body expanded and
400 inlined in others.
401
402 ??? It may make more sense to use one body for inlining and the other
403 body for expanding the function, but this is difficult to do. */
404
405 static void
406 cgraph_reset_node (struct cgraph_node *node)
407 {
408 /* If node->output is set, then this is a unit-at-a-time compilation
409 and we have already begun whole-unit analysis. This is *not*
410 testing for whether we've already emitted the function. That
411 case can be sort-of legitimately seen with real function
412 redefinition errors. I would argue that the front end should
413 never present us with such a case, but don't enforce that for now. */
414 gcc_assert (!node->output);
415
416 /* Reset our data structures so we can analyze the function again. */
417 memset (&node->local, 0, sizeof (node->local));
418 memset (&node->global, 0, sizeof (node->global));
419 memset (&node->rtl, 0, sizeof (node->rtl));
420 node->analyzed = false;
421 node->local.redefined_extern_inline = true;
422 node->local.finalized = false;
423
424 if (!flag_unit_at_a_time)
425 {
426 struct cgraph_node *n, *next;
427
428 for (n = cgraph_nodes; n; n = next)
429 {
430 next = n->next;
431 if (n->global.inlined_to == node)
432 cgraph_remove_node (n);
433 }
434 }
435
436 cgraph_node_remove_callees (node);
437
438 /* We may need to re-queue the node for assembling in case
439 we already processed it and ignored it as not needed. */
440 if (node->reachable && !flag_unit_at_a_time)
441 {
442 struct cgraph_node *n;
443
444 for (n = cgraph_nodes_queue; n; n = n->next_needed)
445 if (n == node)
446 break;
447 if (!n)
448 node->reachable = 0;
449 }
450 }
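
/* For illustration only - an example, not code from GCC itself, of the
   "extern inline" redefinition extension that cgraph_reset_node copes with.
   When the second definition is finalized, the node built for the first body
   is reset and re-analyzed, and redefined_extern_inline is set:

     extern inline int twice (int x) { return x + x; }  // inline-only body
     // ... calls parsed here may use the inline body ...
     int twice (int x) { return 2 * x; }                 // real, offline definition  */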
451
452 static void
453 cgraph_lower_function (struct cgraph_node *node)
454 {
455 if (node->lowered)
456 return;
457 tree_lowering_passes (node->decl);
458 node->lowered = true;
459 }
460
461 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
462 logic in effect. If NESTED is true, then our caller cannot stand to have
463 the garbage collector run at the moment. We would need to either create
464 a new GC context, or just not compile right now. */
465
466 void
467 cgraph_finalize_function (tree decl, bool nested)
468 {
469 struct cgraph_node *node = cgraph_node (decl);
470
471 if (node->local.finalized)
472 cgraph_reset_node (node);
473
474 notice_global_symbol (decl);
475 node->decl = decl;
476 node->local.finalized = true;
477 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
478 if (node->nested)
479 lower_nested_functions (decl);
480 gcc_assert (!node->nested);
481
482 /* If not unit at a time, then we need to create the call graph
483 now, so that called functions can be queued and emitted now. */
484 if (!flag_unit_at_a_time)
485 {
486 cgraph_analyze_function (node);
487 cgraph_decide_inlining_incrementally (node, false);
488 }
489
490 if (decide_is_function_needed (node, decl))
491 cgraph_mark_needed_node (node);
492
493 /* Since we reclaim unreachable nodes at the end of every language
494 level unit, we need to be conservative about possible entry points
495 there. */
496 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
497 cgraph_mark_reachable_node (node);
498
499 /* If not unit at a time, go ahead and emit everything we've found
500 to be reachable at this time. */
501 if (!nested)
502 {
503 if (!cgraph_assemble_pending_functions ())
504 ggc_collect ();
505 }
506
507 /* If we've not yet emitted decl, tell the debug info about it. */
508 if (!TREE_ASM_WRITTEN (decl))
509 (*debug_hooks->deferred_inline_function) (decl);
510
511 /* Possibly warn about unused parameters. */
512 if (warn_unused_parameter)
513 do_warn_unused_parameter (decl);
514 }
515
516 /* Walk tree and record all calls. Called via walk_tree. */
517 static tree
518 record_reference (tree *tp, int *walk_subtrees, void *data)
519 {
520 tree t = *tp;
521
522 switch (TREE_CODE (t))
523 {
524 case VAR_DECL:
525 /* ??? Really, we should mark this decl as *potentially* referenced
526 by this function and re-examine whether the decl is actually used
527 after rtl has been generated. */
528 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
529 {
530 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
531 if (lang_hooks.callgraph.analyze_expr)
532 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
533 data);
534 }
535 break;
536
537 case FDESC_EXPR:
538 case ADDR_EXPR:
539 if (flag_unit_at_a_time)
540 {
541 /* Record dereferences to the functions. This makes the
542 functions reachable unconditionally. */
543 tree decl = TREE_OPERAND (*tp, 0);
544 if (TREE_CODE (decl) == FUNCTION_DECL)
545 cgraph_mark_needed_node (cgraph_node (decl));
546 }
547 break;
548
549 default:
550 /* Save some cycles by not walking types and declarations, as we
551 won't find anything useful there anyway. */
552 if (IS_TYPE_OR_DECL_P (*tp))
553 {
554 *walk_subtrees = 0;
555 break;
556 }
557
558 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
559 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
560 break;
561 }
562
563 return NULL;
564 }
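
/* For illustration only - the idiom used in this file (see
   cgraph_varpool_analyze_pending_decls and cgraph_create_edges) to run
   record_reference over a tree while sharing one visited-node set, rather
   than using walk_tree_without_duplicates:

     visited_nodes = pointer_set_create ();
     walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
     pointer_set_destroy (visited_nodes);
     visited_nodes = NULL;  */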
565
566 /* Create cgraph edges for function calls inside BODY from NODE. */
567
568 static void
569 cgraph_create_edges (struct cgraph_node *node, tree body)
570 {
571 basic_block bb;
572
573 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
574 block_stmt_iterator bsi;
575 tree step;
576 visited_nodes = pointer_set_create ();
577
578 /* Reach the trees by walking over the CFG, and note the
579 enclosing basic-blocks in the call edges. */
580 FOR_EACH_BB_FN (bb, this_cfun)
581 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
582 {
583 tree stmt = bsi_stmt (bsi);
584 tree call = get_call_expr_in (stmt);
585 tree decl;
586
587 if (call && (decl = get_callee_fndecl (call)))
588 {
589 cgraph_create_edge (node, cgraph_node (decl), stmt,
590 bb->count,
591 bb->loop_depth);
592 walk_tree (&TREE_OPERAND (call, 1),
593 record_reference, node, visited_nodes);
594 if (TREE_CODE (stmt) == MODIFY_EXPR)
595 walk_tree (&TREE_OPERAND (stmt, 0),
596 record_reference, node, visited_nodes);
597 }
598 else
599 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
600 }
601
602 /* Look for initializers of constant variables and private statics. */
603 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
604 step;
605 step = TREE_CHAIN (step))
606 {
607 tree decl = TREE_VALUE (step);
608 if (TREE_CODE (decl) == VAR_DECL
609 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
610 && flag_unit_at_a_time)
611 cgraph_varpool_finalize_decl (decl);
612 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
613 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
614 }
615
616 pointer_set_destroy (visited_nodes);
617 visited_nodes = NULL;
618 }
619
620 /* Give initial reasons why inlining would fail. These get
621 either NULLified or, usually, overwritten by a more precise reason
622 later. */
623 static void
624 initialize_inline_failed (struct cgraph_node *node)
625 {
626 struct cgraph_edge *e;
627
628 for (e = node->callers; e; e = e->next_caller)
629 {
630 gcc_assert (!e->callee->global.inlined_to);
631 gcc_assert (e->inline_failed);
632 if (node->local.redefined_extern_inline)
633 e->inline_failed = N_("redefined extern inline functions are not "
634 "considered for inlining");
635 else if (!node->local.inlinable)
636 e->inline_failed = N_("function not inlinable");
637 else
638 e->inline_failed = N_("function not considered for inlining");
639 }
640 }
641
642 /* Rebuild call edges of the current function after passes that are not
643 aware of cgraph updating. */
644 static unsigned int
645 rebuild_cgraph_edges (void)
646 {
647 basic_block bb;
648 struct cgraph_node *node = cgraph_node (current_function_decl);
649 block_stmt_iterator bsi;
650
651 cgraph_node_remove_callees (node);
652
653 node->count = ENTRY_BLOCK_PTR->count;
654
655 FOR_EACH_BB (bb)
656 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
657 {
658 tree stmt = bsi_stmt (bsi);
659 tree call = get_call_expr_in (stmt);
660 tree decl;
661
662 if (call && (decl = get_callee_fndecl (call)))
663 cgraph_create_edge (node, cgraph_node (decl), stmt,
664 bb->count,
665 bb->loop_depth);
666 }
667 initialize_inline_failed (node);
668 gcc_assert (!node->global.inlined_to);
669 return 0;
670 }
671
672 struct tree_opt_pass pass_rebuild_cgraph_edges =
673 {
674 NULL, /* name */
675 NULL, /* gate */
676 rebuild_cgraph_edges, /* execute */
677 NULL, /* sub */
678 NULL, /* next */
679 0, /* static_pass_number */
680 0, /* tv_id */
681 PROP_cfg, /* properties_required */
682 0, /* properties_provided */
683 0, /* properties_destroyed */
684 0, /* todo_flags_start */
685 0, /* todo_flags_finish */
686 0 /* letter */
687 };
688
689 /* Verify cgraph nodes of given cgraph node. */
690 void
691 verify_cgraph_node (struct cgraph_node *node)
692 {
693 struct cgraph_edge *e;
694 struct cgraph_node *main_clone;
695 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
696 basic_block this_block;
697 block_stmt_iterator bsi;
698 bool error_found = false;
699
700 if (errorcount || sorrycount)
701 return;
702
703 timevar_push (TV_CGRAPH_VERIFY);
704 for (e = node->callees; e; e = e->next_callee)
705 if (e->aux)
706 {
707 error ("aux field set for edge %s->%s",
708 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
709 error_found = true;
710 }
711 if (node->count < 0)
712 {
713 error ("Execution count is negative");
714 error_found = true;
715 }
716 for (e = node->callers; e; e = e->next_caller)
717 {
718 if (e->count < 0)
719 {
720 error ("caller edge count is negative");
721 error_found = true;
722 }
723 if (!e->inline_failed)
724 {
725 if (node->global.inlined_to
726 != (e->caller->global.inlined_to
727 ? e->caller->global.inlined_to : e->caller))
728 {
729 error ("inlined_to pointer is wrong");
730 error_found = true;
731 }
732 if (node->callers->next_caller)
733 {
734 error ("multiple inline callers");
735 error_found = true;
736 }
737 }
738 else
739 if (node->global.inlined_to)
740 {
741 error ("inlined_to pointer set for noninline callers");
742 error_found = true;
743 }
744 }
745 if (!node->callers && node->global.inlined_to)
746 {
747 error ("inlined_to pointer is set but no predecessors found");
748 error_found = true;
749 }
750 if (node->global.inlined_to == node)
751 {
752 error ("inlined_to pointer refers to itself");
753 error_found = true;
754 }
755
756 for (main_clone = cgraph_node (node->decl); main_clone;
757 main_clone = main_clone->next_clone)
758 if (main_clone == node)
759 break;
760 if (!cgraph_node (node->decl))
761 {
762 error ("node not found in cgraph_hash");
763 error_found = true;
764 }
765
766 if (node->analyzed
767 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
768 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
769 {
770 if (this_cfun->cfg)
771 {
772 /* The nodes we're interested in are never shared, so walk
773 the tree ignoring duplicates. */
774 visited_nodes = pointer_set_create ();
775 /* Reach the trees by walking over the CFG, and note the
776 enclosing basic-blocks in the call edges. */
777 FOR_EACH_BB_FN (this_block, this_cfun)
778 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
779 {
780 tree stmt = bsi_stmt (bsi);
781 tree call = get_call_expr_in (stmt);
782 tree decl;
783 if (call && (decl = get_callee_fndecl (call)))
784 {
785 struct cgraph_edge *e = cgraph_edge (node, stmt);
786 if (e)
787 {
788 if (e->aux)
789 {
790 error ("shared call_stmt:");
791 debug_generic_stmt (stmt);
792 error_found = true;
793 }
794 if (e->callee->decl != cgraph_node (decl)->decl
795 && e->inline_failed)
796 {
797 error ("edge points to wrong declaration:");
798 debug_tree (e->callee->decl);
799 fprintf (stderr," Instead of:");
800 debug_tree (decl);
801 }
802 e->aux = (void *)1;
803 }
804 else
805 {
806 error ("missing callgraph edge for call stmt:");
807 debug_generic_stmt (stmt);
808 error_found = true;
809 }
810 }
811 }
812 pointer_set_destroy (visited_nodes);
813 visited_nodes = NULL;
814 }
815 else
816 /* No CFG available?! */
817 gcc_unreachable ();
818
819 for (e = node->callees; e; e = e->next_callee)
820 {
821 if (!e->aux)
822 {
823 error ("edge %s->%s has no corresponding call_stmt",
824 cgraph_node_name (e->caller),
825 cgraph_node_name (e->callee));
826 debug_generic_stmt (e->call_stmt);
827 error_found = true;
828 }
829 e->aux = 0;
830 }
831 }
832 if (error_found)
833 {
834 dump_cgraph_node (stderr, node);
835 internal_error ("verify_cgraph_node failed");
836 }
837 timevar_pop (TV_CGRAPH_VERIFY);
838 }
839
840 /* Verify whole cgraph structure. */
841 void
842 verify_cgraph (void)
843 {
844 struct cgraph_node *node;
845
846 if (sorrycount || errorcount)
847 return;
848
849 for (node = cgraph_nodes; node; node = node->next)
850 verify_cgraph_node (node);
851 }
852
853 /* Output one variable, if necessary. Return whether we output it. */
854 static bool
855 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
856 {
857 tree decl = node->decl;
858
859 if (!TREE_ASM_WRITTEN (decl)
860 && !node->alias
861 && !DECL_EXTERNAL (decl)
862 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
863 {
864 assemble_variable (decl, 0, 1, 0);
865 return TREE_ASM_WRITTEN (decl);
866 }
867
868 return false;
869 }
870
871 /* Output all variables enqueued to be assembled. */
872 bool
873 cgraph_varpool_assemble_pending_decls (void)
874 {
875 bool changed = false;
876
877 if (errorcount || sorrycount)
878 return false;
879
880 /* EH might mark decls as needed during expansion. This should be safe since
881 we don't create references to new functions, but it should not be used
882 elsewhere. */
883 cgraph_varpool_analyze_pending_decls ();
884
885 while (cgraph_varpool_nodes_queue)
886 {
887 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
888
889 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
890 if (cgraph_varpool_assemble_decl (node))
891 {
892 changed = true;
893 node->next_needed = cgraph_varpool_assembled_nodes_queue;
894 cgraph_varpool_assembled_nodes_queue = node;
895 node->finalized = 1;
896 }
897 else
898 node->next_needed = NULL;
899 }
900 return changed;
901 }
902 /* Output debug information for the variables that have already been assembled. */
903 static void
904 cgraph_varpool_output_debug_info (void)
905 {
906 timevar_push (TV_SYMOUT);
907 if (errorcount == 0 && sorrycount == 0)
908 while (cgraph_varpool_assembled_nodes_queue)
909 {
910 struct cgraph_varpool_node *node = cgraph_varpool_assembled_nodes_queue;
911
912 /* Local static variables are never seen by check_global_declarations
913 so we need to output debug info by hand. */
914 if (DECL_CONTEXT (node->decl)
915 && (TREE_CODE (DECL_CONTEXT (node->decl)) == BLOCK
916 || TREE_CODE (DECL_CONTEXT (node->decl)) == FUNCTION_DECL)
917 && errorcount == 0 && sorrycount == 0)
918 (*debug_hooks->global_decl) (node->decl);
919 cgraph_varpool_assembled_nodes_queue = node->next_needed;
920 node->next_needed = 0;
921 }
922 timevar_pop (TV_SYMOUT);
923 }
924
925 /* Output all asm statements we have stored up to be output. */
926
927 static void
928 cgraph_output_pending_asms (void)
929 {
930 struct cgraph_asm_node *can;
931
932 if (errorcount || sorrycount)
933 return;
934
935 for (can = cgraph_asm_nodes; can; can = can->next)
936 assemble_asm (can->asm_str);
937 cgraph_asm_nodes = NULL;
938 }
939
940 /* Analyze the function scheduled to be output. */
941 void
942 cgraph_analyze_function (struct cgraph_node *node)
943 {
944 tree decl = node->decl;
945
946 current_function_decl = decl;
947 push_cfun (DECL_STRUCT_FUNCTION (decl));
948 cgraph_lower_function (node);
949
950 /* First kill forward declaration so reverse inlining works properly. */
951 cgraph_create_edges (node, decl);
952
953 node->local.inlinable = tree_inlinable_function_p (decl);
954 if (!flag_unit_at_a_time)
955 node->local.self_insns = estimate_num_insns (decl);
956 if (node->local.inlinable)
957 node->local.disregard_inline_limits
958 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
959 initialize_inline_failed (node);
960 if (flag_really_no_inline && !node->local.disregard_inline_limits)
961 node->local.inlinable = 0;
962 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
963 node->global.insns = node->local.self_insns;
964
965 node->analyzed = true;
966 pop_cfun ();
967 current_function_decl = NULL;
968 }
969
970 /* Look for externally_visible and used attributes and mark cgraph nodes
971 accordingly.
972
973 We cannot mark the nodes at the point the attributes are processed (in
974 handle_*_attribute) because the copy of the declarations available at that
975 point may not be canonical. For example, in:
976
977 void f();
978 void f() __attribute__((used));
979
980 the declaration we see in handle_used_attribute will be the second
981 declaration -- but the front end will subsequently merge that declaration
982 with the original declaration and discard the second declaration.
983
984 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
985
986 void f() {}
987 void f() __attribute__((externally_visible));
988
989 is valid.
990
991 So, we walk the nodes at the end of the translation unit, applying the
992 attributes at that point. */
993
994 static void
995 process_function_and_variable_attributes (struct cgraph_node *first,
996 struct cgraph_varpool_node *first_var)
997 {
998 struct cgraph_node *node;
999 struct cgraph_varpool_node *vnode;
1000
1001 for (node = cgraph_nodes; node != first; node = node->next)
1002 {
1003 tree decl = node->decl;
1004 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
1005 {
1006 mark_decl_referenced (decl);
1007 if (node->local.finalized)
1008 cgraph_mark_needed_node (node);
1009 }
1010 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1011 {
1012 if (! TREE_PUBLIC (node->decl))
1013 warning (OPT_Wattributes,
1014 "%J%<externally_visible%> attribute have effect only on public objects",
1015 node->decl);
1016 else
1017 {
1018 if (node->local.finalized)
1019 cgraph_mark_needed_node (node);
1020 node->local.externally_visible = true;
1021 }
1022 }
1023 }
1024 for (vnode = cgraph_varpool_nodes; vnode != first_var; vnode = vnode->next)
1025 {
1026 tree decl = vnode->decl;
1027 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
1028 {
1029 mark_decl_referenced (decl);
1030 if (vnode->finalized)
1031 cgraph_varpool_mark_needed_node (vnode);
1032 }
1033 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1034 {
1035 if (! TREE_PUBLIC (vnode->decl))
1036 warning (OPT_Wattributes,
1037 "%J%<externally_visible%> attribute have effect only on public objects",
1038 vnode->decl);
1039 else
1040 {
1041 if (vnode->finalized)
1042 cgraph_varpool_mark_needed_node (vnode);
1043 vnode->externally_visible = true;
1044 }
1045 }
1046 }
1047 }
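
/* For illustration only - user code, not part of GCC itself, whose attributes
   are only applied by the walk above once the whole unit has been parsed.
   The local-scope static must be kept even though nothing references it, and
   the externally_visible function keeps its TREE_PUBLIC flag even under
   -fwhole-program:

     void f (void)
     {
       static int keep_me __attribute__ ((used)) = 42;
     }

     int api (void) __attribute__ ((externally_visible));
     int api (void) { return 0; }  */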
1048
1049 /* Analyze the whole compilation unit once it is parsed completely. */
1050
1051 void
1052 cgraph_finalize_compilation_unit (void)
1053 {
1054 struct cgraph_node *node, *next;
1055 /* Keep track of already processed nodes when called multiple times for
1056 intermodule optimization. */
1057 static struct cgraph_node *first_analyzed;
1058 struct cgraph_node *first_processed = first_analyzed;
1059 static struct cgraph_varpool_node *first_analyzed_var;
1060
1061 if (errorcount || sorrycount)
1062 return;
1063
1064 finish_aliases_1 ();
1065
1066 if (!flag_unit_at_a_time)
1067 {
1068 cgraph_output_pending_asms ();
1069 cgraph_assemble_pending_functions ();
1070 cgraph_varpool_output_debug_info ();
1071 return;
1072 }
1073
1074 if (!quiet_flag)
1075 {
1076 fprintf (stderr, "\nAnalyzing compilation unit");
1077 fflush (stderr);
1078 }
1079
1080 timevar_push (TV_CGRAPH);
1081 process_function_and_variable_attributes (first_processed,
1082 first_analyzed_var);
1083 first_processed = cgraph_nodes;
1084 first_analyzed_var = cgraph_varpool_nodes;
1085 cgraph_varpool_analyze_pending_decls ();
1086 if (cgraph_dump_file)
1087 {
1088 fprintf (cgraph_dump_file, "Initial entry points:");
1089 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1090 if (node->needed && DECL_SAVED_TREE (node->decl))
1091 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1092 fprintf (cgraph_dump_file, "\n");
1093 }
1094
1095 /* Propagate the reachability flag and lower the representation of all
1096 reachable functions. In the future, lowering will introduce new functions
1097 and new entry points on the way (by template instantiation and virtual
1098 method table generation, for instance). */
1099 while (cgraph_nodes_queue)
1100 {
1101 struct cgraph_edge *edge;
1102 tree decl = cgraph_nodes_queue->decl;
1103
1104 node = cgraph_nodes_queue;
1105 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1106 node->next_needed = NULL;
1107
1108 /* ??? It is possible to create an extern inline function and later use
1109 the weak alias attribute to kill its body. See
1110 gcc.c-torture/compile/20011119-1.c */
1111 if (!DECL_SAVED_TREE (decl))
1112 {
1113 cgraph_reset_node (node);
1114 continue;
1115 }
1116
1117 gcc_assert (!node->analyzed && node->reachable);
1118 gcc_assert (DECL_SAVED_TREE (decl));
1119
1120 cgraph_analyze_function (node);
1121
1122 for (edge = node->callees; edge; edge = edge->next_callee)
1123 if (!edge->callee->reachable)
1124 cgraph_mark_reachable_node (edge->callee);
1125
1126 /* We finalize local static variables while constructing the callgraph
1127 edges. Process their attributes too. */
1128 process_function_and_variable_attributes (first_processed,
1129 first_analyzed_var);
1130 first_processed = cgraph_nodes;
1131 first_analyzed_var = cgraph_varpool_nodes;
1132 cgraph_varpool_analyze_pending_decls ();
1133 }
1134
1135 /* Collect entry points to the unit. */
1136 if (cgraph_dump_file)
1137 {
1138 fprintf (cgraph_dump_file, "Unit entry points:");
1139 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1140 if (node->needed && DECL_SAVED_TREE (node->decl))
1141 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1142 fprintf (cgraph_dump_file, "\n\nInitial ");
1143 dump_cgraph (cgraph_dump_file);
1144 }
1145
1146 if (cgraph_dump_file)
1147 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1148
1149 for (node = cgraph_nodes; node != first_analyzed; node = next)
1150 {
1151 tree decl = node->decl;
1152 next = node->next;
1153
1154 if (node->local.finalized && !DECL_SAVED_TREE (decl))
1155 cgraph_reset_node (node);
1156
1157 if (!node->reachable && DECL_SAVED_TREE (decl))
1158 {
1159 if (cgraph_dump_file)
1160 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1161 cgraph_remove_node (node);
1162 continue;
1163 }
1164 else
1165 node->next_needed = NULL;
1166 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1167 gcc_assert (node->analyzed == node->local.finalized);
1168 }
1169 if (cgraph_dump_file)
1170 {
1171 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1172 dump_cgraph (cgraph_dump_file);
1173 }
1174 first_analyzed = cgraph_nodes;
1175 ggc_collect ();
1176 timevar_pop (TV_CGRAPH);
1177 }
1178 /* Figure out what functions we want to assemble. */
1179
1180 static void
1181 cgraph_mark_functions_to_output (void)
1182 {
1183 struct cgraph_node *node;
1184
1185 for (node = cgraph_nodes; node; node = node->next)
1186 {
1187 tree decl = node->decl;
1188 struct cgraph_edge *e;
1189
1190 gcc_assert (!node->output);
1191
1192 for (e = node->callers; e; e = e->next_caller)
1193 if (e->inline_failed)
1194 break;
1195
1196 /* We need to output all local functions that are used and not
1197 always inlined, as well as those that are reachable from
1198 outside the current compilation unit. */
1199 if (DECL_SAVED_TREE (decl)
1200 && !node->global.inlined_to
1201 && (node->needed
1202 || (e && node->reachable))
1203 && !TREE_ASM_WRITTEN (decl)
1204 && !DECL_EXTERNAL (decl))
1205 node->output = 1;
1206 else
1207 {
1208 /* We should've reclaimed all functions that are not needed. */
1209 #ifdef ENABLE_CHECKING
1210 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1211 && !DECL_EXTERNAL (decl))
1212 {
1213 dump_cgraph_node (stderr, node);
1214 internal_error ("failed to reclaim unneeded function");
1215 }
1216 #endif
1217 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1218 || DECL_EXTERNAL (decl));
1219
1220 }
1221
1222 }
1223 }
1224
1225 /* Expand function specified by NODE. */
1226
1227 static void
1228 cgraph_expand_function (struct cgraph_node *node)
1229 {
1230 tree decl = node->decl;
1231
1232 /* We ought not to compile any inline clones. */
1233 gcc_assert (!node->global.inlined_to);
1234
1235 if (flag_unit_at_a_time)
1236 announce_function (decl);
1237
1238 cgraph_lower_function (node);
1239
1240 /* Generate RTL for the body of DECL. */
1241 lang_hooks.callgraph.expand_function (decl);
1242
1243 /* Make sure that the back end didn't give up on compiling. */
1244 /* ??? Can happen with nested functions of extern inline. */
1245 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1246
1247 current_function_decl = NULL;
1248 if (!cgraph_preserve_function_body_p (node->decl))
1249 {
1250 DECL_SAVED_TREE (node->decl) = NULL;
1251 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1252 DECL_INITIAL (node->decl) = error_mark_node;
1253 /* Eliminate all call edges. This is important so the call_expr no longer
1254 points to the dead function body. */
1255 cgraph_node_remove_callees (node);
1256 }
1257
1258 cgraph_function_flags_ready = true;
1259 }
1260
1261 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1262
1263 bool
1264 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1265 {
1266 *reason = e->inline_failed;
1267 return !e->inline_failed;
1268 }
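
/* For illustration only - a sketch, not code from GCC itself, of how callers
   are expected to use cgraph_inline_p; REASON receives the inline_failed
   string recorded by initialize_inline_failed or by the inliner:

     const char *reason;
     if (!cgraph_inline_p (e, &reason))
       // The call site E was not inlined; REASON now explains why.
       ;  */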
1269
1270
1271
1272 /* Expand all functions that must be output.
1273
1274 Attempt to topologically sort the nodes so a function is output when
1275 all the functions it calls are already assembled, to allow data to be
1276 propagated across the callgraph. Use a stack to get a smaller distance
1277 between a function and its callees (later we may choose to use a more
1278 sophisticated algorithm for function reordering; we will likely want
1279 to use subsections to make the output functions appear in top-down
1280 order). */
1281
1282 static void
1283 cgraph_expand_all_functions (void)
1284 {
1285 struct cgraph_node *node;
1286 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1287 int order_pos = 0, new_order_pos = 0;
1288 int i;
1289
1290 order_pos = cgraph_postorder (order);
1291 gcc_assert (order_pos == cgraph_n_nodes);
1292
1293 /* Garbage collector may remove inline clones we eliminate during
1294 optimization. So we must be sure to not reference them. */
1295 for (i = 0; i < order_pos; i++)
1296 if (order[i]->output)
1297 order[new_order_pos++] = order[i];
1298
1299 for (i = new_order_pos - 1; i >= 0; i--)
1300 {
1301 node = order[i];
1302 if (node->output)
1303 {
1304 gcc_assert (node->reachable);
1305 node->output = 0;
1306 cgraph_expand_function (node);
1307 }
1308 }
1309
1310 free (order);
1311
1312 /* Process CGRAPH_EXPAND_QUEUE, which holds functions created during
1313 the expansion process. Note that this queue may grow as it is
1314 being processed, as the new functions may generate new ones. */
1315 while (cgraph_expand_queue)
1316 {
1317 node = cgraph_expand_queue;
1318 cgraph_expand_queue = cgraph_expand_queue->next_needed;
1319 node->next_needed = NULL;
1320 node->output = 0;
1321 node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1322 cgraph_expand_function (node);
1323 }
1324 }
1325
1326 /* This is used to sort the node types by the cgraph order number. */
1327
1328 struct cgraph_order_sort
1329 {
1330 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1331 union
1332 {
1333 struct cgraph_node *f;
1334 struct cgraph_varpool_node *v;
1335 struct cgraph_asm_node *a;
1336 } u;
1337 };
1338
1339 /* Output all functions, variables, and asm statements in the order
1340 according to their order fields, which is the order in which they
1341 appeared in the file. This implements -fno-toplevel-reorder. In
1342 this mode we may output functions and variables which don't really
1343 need to be output. */
1344
1345 static void
1346 cgraph_output_in_order (void)
1347 {
1348 int max;
1349 size_t size;
1350 struct cgraph_order_sort *nodes;
1351 int i;
1352 struct cgraph_node *pf;
1353 struct cgraph_varpool_node *pv;
1354 struct cgraph_asm_node *pa;
1355
1356 max = cgraph_order;
1357 size = max * sizeof (struct cgraph_order_sort);
1358 nodes = (struct cgraph_order_sort *) alloca (size);
1359 memset (nodes, 0, size);
1360
1361 cgraph_varpool_analyze_pending_decls ();
1362
1363 for (pf = cgraph_nodes; pf; pf = pf->next)
1364 {
1365 if (pf->output)
1366 {
1367 i = pf->order;
1368 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1369 nodes[i].kind = ORDER_FUNCTION;
1370 nodes[i].u.f = pf;
1371 }
1372 }
1373
1374 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1375 {
1376 i = pv->order;
1377 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1378 nodes[i].kind = ORDER_VAR;
1379 nodes[i].u.v = pv;
1380 }
1381
1382 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1383 {
1384 i = pa->order;
1385 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1386 nodes[i].kind = ORDER_ASM;
1387 nodes[i].u.a = pa;
1388 }
1389
1390 for (i = 0; i < max; ++i)
1391 {
1392 switch (nodes[i].kind)
1393 {
1394 case ORDER_FUNCTION:
1395 nodes[i].u.f->output = 0;
1396 cgraph_expand_function (nodes[i].u.f);
1397 break;
1398
1399 case ORDER_VAR:
1400 cgraph_varpool_assemble_decl (nodes[i].u.v);
1401 break;
1402
1403 case ORDER_ASM:
1404 assemble_asm (nodes[i].u.a->asm_str);
1405 break;
1406
1407 case ORDER_UNDEFINED:
1408 break;
1409
1410 default:
1411 gcc_unreachable ();
1412 }
1413 }
1414
1415 cgraph_asm_nodes = NULL;
1416 }
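
/* For illustration only - an example, not from GCC itself, of the kind of
   legacy unit that relies on source order and therefore needs the
   -fno-toplevel-reorder behavior implemented by cgraph_output_in_order.
   (The '#' assembler comment syntax is target-dependent; this is only a
   sketch.)

     int before_marker = 0;
     asm ("# marker expected between before_marker and after_marker");
     int after_marker = 1;  */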
1417
1418 /* Mark visibility of all functions.
1419
1420 A local function is one whose calls can occur only in the current
1421 compilation unit and all its calls are explicit, so we can change
1422 its calling convention. We simply mark all static functions whose
1423 address is not taken as local.
1424
1425 We also change the TREE_PUBLIC flag of all declarations that are public
1426 from the language point of view but for which we want to override this
1427 default via visibilities, for the backend's point of view. */
1428
1429 static void
1430 cgraph_function_and_variable_visibility (void)
1431 {
1432 struct cgraph_node *node;
1433 struct cgraph_varpool_node *vnode;
1434
1435 for (node = cgraph_nodes; node; node = node->next)
1436 {
1437 if (node->reachable
1438 && (DECL_COMDAT (node->decl)
1439 || (!flag_whole_program
1440 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1441 node->local.externally_visible = true;
1442 if (!node->local.externally_visible && node->analyzed
1443 && !DECL_EXTERNAL (node->decl))
1444 {
1445 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1446 TREE_PUBLIC (node->decl) = 0;
1447 }
1448 node->local.local = (!node->needed
1449 && node->analyzed
1450 && !DECL_EXTERNAL (node->decl)
1451 && !node->local.externally_visible);
1452 }
1453 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1454 {
1455 if (vnode->needed
1456 && !flag_whole_program
1457 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1458 vnode->externally_visible = 1;
1459 if (!vnode->externally_visible)
1460 {
1461 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1462 TREE_PUBLIC (vnode->decl) = 0;
1463 }
1464 gcc_assert (TREE_STATIC (vnode->decl));
1465 }
1466
1467 /* Because we have to be conservative on the boundaries of source
1468 level units, it is possible that we marked some functions as
1469 reachable just because they might be used later via external
1470 linkage, but after making them local they are really unreachable
1471 now. */
1472 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1473
1474 if (cgraph_dump_file)
1475 {
1476 fprintf (cgraph_dump_file, "\nMarking local functions:");
1477 for (node = cgraph_nodes; node; node = node->next)
1478 if (node->local.local)
1479 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1480 fprintf (cgraph_dump_file, "\n\n");
1481 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1482 for (node = cgraph_nodes; node; node = node->next)
1483 if (node->local.externally_visible)
1484 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1485 fprintf (cgraph_dump_file, "\n\n");
1486 }
1487 cgraph_function_flags_ready = true;
1488 }
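
/* For illustration only - an example, not code from GCC itself. After the
   pass above, helper below is marked local (static, address never taken, all
   calls explicit), so the backend may use a custom calling convention for it,
   while exported stays externally visible:

     static int helper (int x) { return x + 1; }     // node->local.local set
     int exported (int x) { return helper (x); }     // TREE_PUBLIC, externally visible  */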
1489
1490 /* Return true when the function body of DECL still needs to be kept around
1491 for later re-use. */
1492 bool
1493 cgraph_preserve_function_body_p (tree decl)
1494 {
1495 struct cgraph_node *node;
1496 if (!cgraph_global_info_ready)
1497 return (DECL_INLINE (decl) && !flag_really_no_inline);
1498 /* Look if there is any clone around. */
1499 for (node = cgraph_node (decl); node; node = node->next_clone)
1500 if (node->global.inlined_to)
1501 return true;
1502 return false;
1503 }
1504
1505 static void
1506 ipa_passes (void)
1507 {
1508 cfun = NULL;
1509 tree_register_cfg_hooks ();
1510 bitmap_obstack_initialize (NULL);
1511 execute_ipa_pass_list (all_ipa_passes);
1512 bitmap_obstack_release (NULL);
1513 }
1514
1515 /* Perform simple optimizations based on callgraph. */
1516
1517 void
1518 cgraph_optimize (void)
1519 {
1520 if (errorcount || sorrycount)
1521 return;
1522
1523 #ifdef ENABLE_CHECKING
1524 verify_cgraph ();
1525 #endif
1526 if (!flag_unit_at_a_time)
1527 {
1528 cgraph_output_pending_asms ();
1529 cgraph_varpool_assemble_pending_decls ();
1530 cgraph_varpool_output_debug_info ();
1531 return;
1532 }
1533
1534 process_pending_assemble_externals ();
1535
1536 /* The front end may output common variables after the unit has been finalized.
1537 It is safe to deal with them here as they are always zero-initialized. */
1538 cgraph_varpool_analyze_pending_decls ();
1539
1540 timevar_push (TV_CGRAPHOPT);
1541 if (!quiet_flag)
1542 fprintf (stderr, "Performing interprocedural optimizations\n");
1543
1544 cgraph_function_and_variable_visibility ();
1545 if (cgraph_dump_file)
1546 {
1547 fprintf (cgraph_dump_file, "Marked ");
1548 dump_cgraph (cgraph_dump_file);
1549 }
1550
1551 /* Don't run the IPA passes if there were any error or sorry messages. */
1552 if (errorcount == 0 && sorrycount == 0)
1553 ipa_passes ();
1554
1555 /* This pass removes the bodies of extern inline functions we never inlined.
1556 Do this later so other IPA passes see what is really going on. */
1557 cgraph_remove_unreachable_nodes (false, dump_file);
1558 cgraph_increase_alignment ();
1559 cgraph_global_info_ready = true;
1560 if (cgraph_dump_file)
1561 {
1562 fprintf (cgraph_dump_file, "Optimized ");
1563 dump_cgraph (cgraph_dump_file);
1564 dump_varpool (cgraph_dump_file);
1565 }
1566 timevar_pop (TV_CGRAPHOPT);
1567
1568 /* Output everything. */
1569 if (!quiet_flag)
1570 fprintf (stderr, "Assembling functions:\n");
1571 #ifdef ENABLE_CHECKING
1572 verify_cgraph ();
1573 #endif
1574
1575 cgraph_mark_functions_to_output ();
1576
1577 if (!flag_toplevel_reorder)
1578 cgraph_output_in_order ();
1579 else
1580 {
1581 cgraph_output_pending_asms ();
1582
1583 cgraph_expand_all_functions ();
1584 cgraph_varpool_remove_unreferenced_decls ();
1585
1586 cgraph_varpool_assemble_pending_decls ();
1587 cgraph_varpool_output_debug_info ();
1588 }
1589
1590 if (cgraph_dump_file)
1591 {
1592 fprintf (cgraph_dump_file, "\nFinal ");
1593 dump_cgraph (cgraph_dump_file);
1594 }
1595 #ifdef ENABLE_CHECKING
1596 verify_cgraph ();
1597 /* Double check that all inline clones are gone and that all
1598 function bodies have been released from memory. */
1599 if (flag_unit_at_a_time
1600 && !(sorrycount || errorcount))
1601 {
1602 struct cgraph_node *node;
1603 bool error_found = false;
1604
1605 for (node = cgraph_nodes; node; node = node->next)
1606 if (node->analyzed
1607 && (node->global.inlined_to
1608 || DECL_SAVED_TREE (node->decl)))
1609 {
1610 error_found = true;
1611 dump_cgraph_node (stderr, node);
1612 }
1613 if (error_found)
1614 internal_error ("nodes with no released memory found");
1615 }
1616 #endif
1617 }
1618
1619 /* Increase alignment of global arrays to improve vectorization potential.
1620 TODO:
1621 - Consider also structs that have an array field.
1622 - Use ipa analysis to prune arrays that can't be vectorized?
1623 This should involve global alignment analysis and in the future also
1624 array padding. */
1625
1626 static void
1627 cgraph_increase_alignment (void)
1628 {
1629 if (flag_section_anchors && flag_tree_vectorize)
1630 {
1631 struct cgraph_varpool_node *vnode;
1632
1633 /* Increase the alignment of all global arrays for vectorization. */
1634 for (vnode = cgraph_varpool_nodes_queue;
1635 vnode;
1636 vnode = vnode->next_needed)
1637 {
1638 tree vectype, decl = vnode->decl;
1639 unsigned int alignment;
1640
1641 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
1642 continue;
1643 vectype = get_vectype_for_scalar_type (TREE_TYPE (TREE_TYPE (decl)));
1644 if (!vectype)
1645 continue;
1646 alignment = TYPE_ALIGN (vectype);
1647 if (DECL_ALIGN (decl) >= alignment)
1648 continue;
1649
1650 if (vect_can_force_dr_alignment_p (decl, alignment))
1651 {
1652 DECL_ALIGN (decl) = TYPE_ALIGN (vectype);
1653 DECL_USER_ALIGN (decl) = 1;
1654 if (cgraph_dump_file)
1655 {
1656 fprintf (cgraph_dump_file, "Increasing alignment of decl: ");
1657 print_generic_expr (cgraph_dump_file, decl, TDF_SLIM);
1658 }
1659 }
1660 }
1661 }
1662 }
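
/* For illustration only - an example, not code from GCC itself. With
   -fsection-anchors and -ftree-vectorize, a global array like the one below
   may have its DECL_ALIGN raised to the alignment of the matching vector
   type, so the vectorized loop can use aligned accesses:

     float samples[1024];
     void scale (float s)
     {
       int i;
       for (i = 0; i < 1024; i++)
         samples[i] *= s;
     }  */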
1663
1664 /* Generate and emit a static constructor or destructor. WHICH must be
1665 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1666 GENERIC statements. */
1667
1668 void
1669 cgraph_build_static_cdtor (char which, tree body, int priority)
1670 {
1671 static int counter = 0;
1672 char which_buf[16];
1673 tree decl, name, resdecl;
1674
1675 sprintf (which_buf, "%c_%d", which, counter++);
1676 name = get_file_function_name_long (which_buf);
1677
1678 decl = build_decl (FUNCTION_DECL, name,
1679 build_function_type (void_type_node, void_list_node));
1680 current_function_decl = decl;
1681
1682 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1683 DECL_ARTIFICIAL (resdecl) = 1;
1684 DECL_IGNORED_P (resdecl) = 1;
1685 DECL_RESULT (decl) = resdecl;
1686
1687 allocate_struct_function (decl);
1688
1689 TREE_STATIC (decl) = 1;
1690 TREE_USED (decl) = 1;
1691 DECL_ARTIFICIAL (decl) = 1;
1692 DECL_IGNORED_P (decl) = 1;
1693 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1694 DECL_SAVED_TREE (decl) = body;
1695 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1696 DECL_UNINLINABLE (decl) = 1;
1697
1698 DECL_INITIAL (decl) = make_node (BLOCK);
1699 TREE_USED (DECL_INITIAL (decl)) = 1;
1700
1701 DECL_SOURCE_LOCATION (decl) = input_location;
1702 cfun->function_end_locus = input_location;
1703
1704 switch (which)
1705 {
1706 case 'I':
1707 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1708 break;
1709 case 'D':
1710 DECL_STATIC_DESTRUCTOR (decl) = 1;
1711 break;
1712 default:
1713 gcc_unreachable ();
1714 }
1715
1716 gimplify_function_tree (decl);
1717
1718 /* ??? We will get called LATE in the compilation process. */
1719 if (cgraph_global_info_ready)
1720 {
1721 tree_lowering_passes (decl);
1722 tree_rest_of_compilation (decl);
1723 }
1724 else
1725 cgraph_finalize_function (decl, 0);
1726
1727 if (targetm.have_ctors_dtors)
1728 {
1729 void (*fn) (rtx, int);
1730
1731 if (which == 'I')
1732 fn = targetm.asm_out.constructor;
1733 else
1734 fn = targetm.asm_out.destructor;
1735 fn (XEXP (DECL_RTL (decl), 0), priority);
1736 }
1737 }
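
/* For illustration only - a sketch, not code from GCC itself, of the shape of
   a call to cgraph_build_static_cdtor. Building BODY (a STATEMENT_LIST of
   GENERIC statements) is elided; DEFAULT_INIT_PRIORITY is assumed to be the
   usual default priority macro:

     tree body = ...;   // GENERIC statements to run before main
     cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);  */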
1738
1739 void
1740 init_cgraph (void)
1741 {
1742 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1743 }
1744
1745 /* The edges representing the callers of the NEW_VERSION node were
1746 fixed by cgraph_function_versioning (); now the call_expr in their
1747 respective trees should be updated to call the NEW_VERSION. */
1748
1749 static void
1750 update_call_expr (struct cgraph_node *new_version)
1751 {
1752 struct cgraph_edge *e;
1753
1754 gcc_assert (new_version);
1755 for (e = new_version->callers; e; e = e->next_caller)
1756 /* Update the call expr on the edges
1757 to call the new version. */
1758 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1759 }
1760
1761
1762 /* Create a new cgraph node which is the new version of the
1763 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1764 edges which should be redirected to point to
1765 NEW_VERSION. All the callee edges of OLD_VERSION
1766 are cloned to the new version node. Return the new
1767 version node. */
1768
1769 static struct cgraph_node *
1770 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1771 tree new_decl,
1772 VEC(cgraph_edge_p,heap) *redirect_callers)
1773 {
1774 struct cgraph_node *new_version;
1775 struct cgraph_edge *e, *new_e;
1776 struct cgraph_edge *next_callee;
1777 unsigned i;
1778
1779 gcc_assert (old_version);
1780
1781 new_version = cgraph_node (new_decl);
1782
1783 new_version->analyzed = true;
1784 new_version->local = old_version->local;
1785 new_version->global = old_version->global;
1786 new_version->rtl = old_version->rtl;
1787 new_version->reachable = true;
1788 new_version->count = old_version->count;
1789
1790 /* Clone the old node callees. Recursive calls are
1791 also cloned. */
1792 for (e = old_version->callees;e; e=e->next_callee)
1793 {
1794 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1795 new_e->count = e->count;
1796 }
1797 /* Fix recursive calls.
1798 If OLD_VERSION has a recursive call after the
1799 previous edge cloning, the new version will have an edge
1800 pointing to the old version, which is wrong;
1801 Redirect it to point to the new version. */
1802 for (e = new_version->callees ; e; e = next_callee)
1803 {
1804 next_callee = e->next_callee;
1805 if (e->callee == old_version)
1806 cgraph_redirect_edge_callee (e, new_version);
1807
1808 if (!next_callee)
1809 break;
1810 }
1811 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1812 {
1813 /* Redirect calls to the old version node to point to its new
1814 version. */
1815 cgraph_redirect_edge_callee (e, new_version);
1816 }
1817
1818 return new_version;
1819 }
1820
1821 /* Perform function versioning.
1822 Function versioning includes copying of the tree and
1823 a callgraph update (creating a new cgraph node and updating
1824 its callees and callers).
1825
1826 REDIRECT_CALLERS varray includes the edges to be redirected
1827 to the new version.
1828
1829 TREE_MAP is a mapping of tree nodes we want to replace with
1830 new ones (according to results of prior analysis).
1831 OLD_VERSION_NODE is the node that is versioned.
1832 It returns the new version's cgraph node. */
1833
1834 struct cgraph_node *
1835 cgraph_function_versioning (struct cgraph_node *old_version_node,
1836 VEC(cgraph_edge_p,heap) *redirect_callers,
1837 varray_type tree_map)
1838 {
1839 tree old_decl = old_version_node->decl;
1840 struct cgraph_node *new_version_node = NULL;
1841 tree new_decl;
1842
1843 if (!tree_versionable_function_p (old_decl))
1844 return NULL;
1845
1846 /* Make a new FUNCTION_DECL tree node for the
1847 new version. */
1848 new_decl = copy_node (old_decl);
1849
1850 /* Create the new version's call-graph node.
1851 and update the edges of the new node. */
1852 new_version_node =
1853 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1854 redirect_callers);
1855
1856 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1857 tree_function_versioning (old_decl, new_decl, tree_map, false);
1858 /* Update the call_expr on the edges to call the new version node. */
1859 update_call_expr (new_version_node);
1860
1861 /* Update the new version's properties.
1862 Make the new version visible only within this translation unit.
1863 ??? We cannot use COMDAT linkage because there is no
1864 ABI support for this. */
1865 DECL_EXTERNAL (new_version_node->decl) = 0;
1866 DECL_ONE_ONLY (new_version_node->decl) = 0;
1867 TREE_PUBLIC (new_version_node->decl) = 0;
1868 DECL_COMDAT (new_version_node->decl) = 0;
1869 new_version_node->local.externally_visible = 0;
1870 new_version_node->local.local = 1;
1871 new_version_node->lowered = true;
1872 return new_version_node;
1873 }
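
/* For illustration only - a sketch, not code from GCC itself, of how an IPA
   pass might call cgraph_function_versioning: collect the caller edges to be
   redirected and pass NULL for TREE_MAP when no replacements are wanted.
   (Assumes the VEC_alloc/VEC_safe_push macros from vec.h.)

     VEC (cgraph_edge_p, heap) *redirect = VEC_alloc (cgraph_edge_p, heap, 4);
     struct cgraph_edge *cs;
     struct cgraph_node *new_node;

     for (cs = node->callers; cs; cs = cs->next_caller)
       VEC_safe_push (cgraph_edge_p, heap, redirect, cs);
     new_node = cgraph_function_versioning (node, redirect, NULL);  */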
1874
1875 /* Produce a separate function body for inline clones so the offline copy can
1876 be modified without affecting them. */
1877 struct cgraph_node *
1878 save_inline_function_body (struct cgraph_node *node)
1879 {
1880 struct cgraph_node *first_clone;
1881
1882 gcc_assert (node == cgraph_node (node->decl));
1883
1884 cgraph_lower_function (node);
1885
1886 /* In non-unit-at-a-time mode we construct a full-fledged clone that we never
1887 output to the assembly file. This clone is pointed to by the inline_decl of the
1888 original function, and the inlining infrastructure knows how to deal with it. */
1889 if (!flag_unit_at_a_time)
1890 {
1891 struct cgraph_edge *e;
1892
1893 first_clone = cgraph_clone_node (node, node->count, 0, false);
1894 first_clone->needed = 0;
1895 first_clone->reachable = 1;
1896 /* Recursively clone all bodies. */
1897 for (e = first_clone->callees; e; e = e->next_callee)
1898 if (!e->inline_failed)
1899 cgraph_clone_inlined_nodes (e, true, false);
1900 }
1901 else
1902 first_clone = node->next_clone;
1903
1904 first_clone->decl = copy_node (node->decl);
1905 node->next_clone = NULL;
1906 if (!flag_unit_at_a_time)
1907 node->inline_decl = first_clone->decl;
1908 first_clone->prev_clone = NULL;
1909 cgraph_insert_node_to_hashtable (first_clone);
1910 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1911
1912 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1913 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1914
1915 DECL_EXTERNAL (first_clone->decl) = 0;
1916 DECL_ONE_ONLY (first_clone->decl) = 0;
1917 TREE_PUBLIC (first_clone->decl) = 0;
1918 DECL_COMDAT (first_clone->decl) = 0;
1919
1920 for (node = first_clone->next_clone; node; node = node->next_clone)
1921 node->decl = first_clone->decl;
1922 #ifdef ENABLE_CHECKING
1923 verify_cgraph_node (first_clone);
1924 #endif
1925 return first_clone;
1926 }
1927
1928 #include "gt-cgraphunit.h"