1 /* Callgraph based intraprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
/* This module implements the main driver of the compilation process as
   well as a few basic intraprocedural optimizers.

   The main purpose of this file is to act as an interface between the
   tree based front ends and the back end (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole body
      of a function and it is certain that neither the function body nor
      the declaration will change.

      (There is one exception needed for implementing GCC extern inline
      functions.)

    - cgraph_varpool_finalize_variable

      This function has the same behavior as the above but is used for
      static variables.

    - cgraph_finalize_compilation_unit

      This function is called once the compilation unit is finalized and it
      will no longer change.

      In unit-at-a-time mode the call-graph construction and local function
      analysis take place here.  Bodies of unreachable functions are
      released to conserve memory usage.

      ??? The compilation unit in this point of view should be the
      compilation unit as defined by the language - for instance the C
      front end allows multiple compilation units to be parsed at once and
      it should call this function each time parsing is done so we save
      memory.

    - cgraph_optimize

      In unit-at-a-time compilation the intraprocedural analysis takes
      place here.  In particular, static functions whose address is never
      taken are marked as local.  The back end can then use this
      information to modify calling conventions, do better inlining or
      similar optimizations.

    - cgraph_assemble_pending_functions
    - cgraph_varpool_assemble_pending_variables

      In non-unit-at-a-time mode these functions can be used to force
      compilation of functions or variables that are known to be needed at
      a given stage of compilation.

    - cgraph_mark_needed_node
    - cgraph_varpool_mark_needed_node

      When a function or variable is referenced in some hidden way (for
      instance via assembly code and marked by the attribute "used"), the
      call-graph data structure must be updated accordingly by this
      function.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood
      by generic code into understandable ones, or alternatively for
      marking callgraph and varpool nodes referenced by them as needed.

      ??? On tree-ssa, genericizing should take place here and we would
      avoid the need for these hooks (replacing them by a genericizing
      hook).

    - expand_function callback

      This function is used to expand a function and pass it to the RTL
      back end.  The front end should not make any assumptions about when
      this function can be called.  In particular
      cgraph_assemble_pending_functions,
      cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
      cgraph_varpool_finalize_function and cgraph_optimize can cause
      arbitrary previously finalized functions to be expanded.

    We implement two compilation modes.

    - unit-at-a-time:  In this mode the analysis of all functions is
      deferred to cgraph_finalize_compilation_unit and expansion to
      cgraph_optimize.

      In cgraph_finalize_compilation_unit the reachable functions are
      analyzed.  During analysis the call-graph edges from reachable
      functions are constructed and their destinations are marked as
      reachable.  References to functions and variables are discovered too,
      and variables found to be needed are output to the assembly file.
      Via the mark_referenced call in assemble_variable, functions
      referenced by static variables are noticed too.

      The intraprocedural information is produced and its existence is
      indicated by global_info_ready.  Once this flag is set it is
      impossible to change a function from !reachable to reachable and thus
      assemble_variable no longer calls mark_referenced.

      Finally the call graph is topologically sorted and all reachable
      functions that have not been completely inlined and are not external
      are output.

      ??? It is possible that a reference to a function or variable is
      optimized out.  We cannot deal with this nicely because the
      topological order is not suitable for it.  For tree-ssa we may
      consider another pass doing optimization and re-discovering reachable
      functions.

      ??? Reorganize the code so variables are output very last and only if
      they really have been referenced by the produced code, so we catch
      more cases where a reference has been optimized out.

    - non-unit-at-a-time

      All functions and variables are output as early as possible to
      conserve memory consumption.  This may or may not result in less
      memory used, but it is still needed for some legacy code that relies
      on a particular ordering of things output from the compiler.

      Varpool data structures are not used and variables are output
      directly.

      Functions are output early using a call to
      cgraph_assemble_pending_functions from cgraph_finalize_function.  The
      decision on whether a function is needed is made more conservative,
      so uninlinable static functions are needed too.  During the
      call-graph construction the edge destinations are not marked as
      reachable and we rely completely on assemble_variable to mark
      them.  */
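
/* As an illustration only: a hypothetical front end might drive the
   interface described above roughly as follows in unit-at-a-time mode.
   The parse_next_function and parse_next_static_variable routines are
   invented placeholders for the front end's own parser; the cgraph_*
   calls are the entry points documented above (the varpool entry point
   actually used elsewhere in this file is cgraph_varpool_finalize_decl):

       tree decl;

       while ((decl = parse_next_function ()) != NULL_TREE)
         cgraph_finalize_function (decl, false);
       while ((decl = parse_next_static_variable ()) != NULL_TREE)
         cgraph_varpool_finalize_decl (decl);
       cgraph_finalize_compilation_unit ();
       cgraph_optimize ();

   In non-unit-at-a-time mode the same calls are made, but most of the
   work happens eagerly from cgraph_finalize_function rather than being
   deferred to the last two calls.  */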
140
141
142 #include "config.h"
143 #include "system.h"
144 #include "coretypes.h"
145 #include "tm.h"
146 #include "tree.h"
147 #include "rtl.h"
148 #include "tree-flow.h"
149 #include "tree-inline.h"
150 #include "langhooks.h"
151 #include "pointer-set.h"
152 #include "toplev.h"
153 #include "flags.h"
154 #include "ggc.h"
155 #include "debug.h"
156 #include "target.h"
157 #include "cgraph.h"
158 #include "diagnostic.h"
159 #include "timevar.h"
160 #include "params.h"
161 #include "fibheap.h"
162 #include "c-common.h"
163 #include "intl.h"
164 #include "function.h"
165 #include "ipa-prop.h"
166 #include "tree-gimple.h"
167 #include "tree-pass.h"
168 #include "output.h"
169
170 static void cgraph_expand_all_functions (void);
171 static void cgraph_mark_functions_to_output (void);
172 static void cgraph_expand_function (struct cgraph_node *);
173 static tree record_reference (tree *, int *, void *);
174 static void cgraph_output_pending_asms (void);
175
176 /* Records tree nodes seen in record_reference. Simply using
177 walk_tree_without_duplicates doesn't guarantee each node is visited
178 once because it gets a new htab upon each recursive call from
179 record_reference itself. */
180 static struct pointer_set_t *visited_nodes;
181
182 static FILE *cgraph_dump_file;
183
/* Determine if function DECL is needed.  That is, whether it is visible
   to something outside this translation unit, to something magic in the
   system configury, or (if not doing unit-at-a-time) to something we
   haven't seen yet.  */
188
189 static bool
190 decide_is_function_needed (struct cgraph_node *node, tree decl)
191 {
192 tree origin;
193 if (MAIN_NAME_P (DECL_NAME (decl))
194 && TREE_PUBLIC (decl))
195 {
196 node->local.externally_visible = true;
197 return true;
198 }
199
200 /* If the user told us it is used, then it must be so. */
201 if (node->local.externally_visible)
202 return true;
203
204 if (!flag_unit_at_a_time && lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
205 return true;
206
  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later, after finalizing the function, and the fact is only
     noticed in assemble_name then.  This is arguably a bug.  */
210 if (DECL_ASSEMBLER_NAME_SET_P (decl)
211 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
212 return true;
213
214 /* If we decided it was needed before, but at the time we didn't have
215 the body of the function available, then it's still needed. We have
216 to go back and re-check its dependencies now. */
217 if (node->needed)
218 return true;
219
  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
228 if (((TREE_PUBLIC (decl)
229 || (!optimize && !node->local.disregard_inline_limits
230 && !DECL_DECLARED_INLINE_P (decl)
231 && !node->origin))
232 && !flag_whole_program)
233 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
234 return true;
235
236 /* Constructors and destructors are reachable from the runtime by
237 some mechanism. */
238 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
239 return true;
240
241 if (flag_unit_at_a_time)
242 return false;
243
  /* If not doing unit at a time, then we'll only defer this function
     if it's marked for inlining.  Otherwise we want to emit it now.  */
246
247 /* "extern inline" functions are never output locally. */
248 if (DECL_EXTERNAL (decl))
249 return false;
  /* Nested functions of an extern inline function shall not be emitted
     unless we have inlined the origin.  */
252 for (origin = decl_function_context (decl); origin;
253 origin = decl_function_context (origin))
254 if (DECL_EXTERNAL (origin))
255 return false;
256 /* We want to emit COMDAT functions only when absolutely necessary. */
257 if (DECL_COMDAT (decl))
258 return false;
259 if (!DECL_INLINE (decl)
260 || (!node->local.disregard_inline_limits
261 /* When declared inline, defer even the uninlinable functions.
262 This allows them to be eliminated when unused. */
263 && !DECL_DECLARED_INLINE_P (decl)
264 && (!node->local.inlinable || !cgraph_default_inline_p (node, NULL))))
265 return true;
266
267 return false;
268 }
269
270 /* Walk the decls we marked as necessary and see if they reference new
271 variables or functions and add them into the worklists. */
272 static bool
273 cgraph_varpool_analyze_pending_decls (void)
274 {
275 bool changed = false;
276 timevar_push (TV_CGRAPH);
277
278 while (cgraph_varpool_first_unanalyzed_node)
279 {
280 tree decl = cgraph_varpool_first_unanalyzed_node->decl;
281
282 cgraph_varpool_first_unanalyzed_node->analyzed = true;
283
284 cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
285
286 /* Compute the alignment early so function body expanders are
287 already informed about increased alignment. */
288 align_variable (decl, 0);
289
290 if (DECL_INITIAL (decl))
291 {
292 visited_nodes = pointer_set_create ();
293 walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
294 pointer_set_destroy (visited_nodes);
295 visited_nodes = NULL;
296 }
297 changed = true;
298 }
299 timevar_pop (TV_CGRAPH);
300 return changed;
301 }
302
/* Optimization of function bodies might've rendered some variables
   unnecessary, so we want to avoid compiling them.

   This is done by pruning the queue and keeping only the variables that
   really appear needed (i.e. they are either externally visible or
   referenced by a compiled function).  Re-doing the reachability analysis
   on variables brings back the remaining variables referenced by these.  */
310 static void
311 cgraph_varpool_remove_unreferenced_decls (void)
312 {
313 struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
314
315 cgraph_varpool_reset_queue ();
316
317 if (errorcount || sorrycount)
318 return;
319
320 while (node)
321 {
322 tree decl = node->decl;
323 next = node->next_needed;
324 node->needed = 0;
325
326 if (node->finalized
327 && ((DECL_ASSEMBLER_NAME_SET_P (decl)
328 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
329 || node->force_output
330 || decide_is_variable_needed (node, decl)
331 /* ??? Cgraph does not yet rule the world with an iron hand,
332 and does not control the emission of debug information.
333 After a variable has its DECL_RTL set, we must assume that
334 it may be referenced by the debug information, and we can
335 no longer elide it. */
336 || DECL_RTL_SET_P (decl)))
337 cgraph_varpool_mark_needed_node (node);
338
339 node = next;
340 }
341 /* Make sure we mark alias targets as used targets. */
342 finish_aliases_1 ();
343 cgraph_varpool_analyze_pending_decls ();
344 }
345
346
/* When not doing unit-at-a-time, output all functions enqueued.
   Return true when any such functions were found.  */
349
350 bool
351 cgraph_assemble_pending_functions (void)
352 {
353 bool output = false;
354
355 if (flag_unit_at_a_time)
356 return false;
357
358 cgraph_output_pending_asms ();
359
360 while (cgraph_nodes_queue)
361 {
362 struct cgraph_node *n = cgraph_nodes_queue;
363
364 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
365 n->next_needed = NULL;
366 if (!n->global.inlined_to
367 && !n->alias
368 && !DECL_EXTERNAL (n->decl))
369 {
370 cgraph_expand_function (n);
371 output = true;
372 }
373 }
374
  /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
     the expansion process.  Note that this queue may grow as it is
     being processed, as the new functions may generate new ones.  */
378 while (cgraph_expand_queue)
379 {
380 struct cgraph_node *n = cgraph_expand_queue;
381 cgraph_expand_queue = cgraph_expand_queue->next_needed;
382 n->next_needed = NULL;
383 cgraph_finalize_function (n->decl, false);
384 output = true;
385 }
386
387 return output;
388 }
389
390
/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two copies of the body differ is not well defined.
   We replace the old body with the new body so that in unit-at-a-time
   mode we always use the new body, while in normal mode we may end up
   with the old body inlined into some functions and the new body
   expanded and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */
400
401 static void
402 cgraph_reset_node (struct cgraph_node *node)
403 {
404 /* If node->output is set, then this is a unit-at-a-time compilation
405 and we have already begun whole-unit analysis. This is *not*
406 testing for whether we've already emitted the function. That
407 case can be sort-of legitimately seen with real function
408 redefinition errors. I would argue that the front end should
409 never present us with such a case, but don't enforce that for now. */
410 gcc_assert (!node->output);
411
412 /* Reset our data structures so we can analyze the function again. */
413 memset (&node->local, 0, sizeof (node->local));
414 memset (&node->global, 0, sizeof (node->global));
415 memset (&node->rtl, 0, sizeof (node->rtl));
416 node->analyzed = false;
417 node->local.redefined_extern_inline = true;
418 node->local.finalized = false;
419
420 if (!flag_unit_at_a_time)
421 {
422 struct cgraph_node *n, *next;
423
424 for (n = cgraph_nodes; n; n = next)
425 {
426 next = n->next;
427 if (n->global.inlined_to == node)
428 cgraph_remove_node (n);
429 }
430 }
431
432 cgraph_node_remove_callees (node);
433
  /* We may need to re-queue the node for assembling in case
     we have already processed it and ignored it as not needed.  */
436 if (node->reachable && !flag_unit_at_a_time)
437 {
438 struct cgraph_node *n;
439
440 for (n = cgraph_nodes_queue; n; n = n->next_needed)
441 if (n == node)
442 break;
443 if (!n)
444 node->reachable = 0;
445 }
446 }
447
448 static void
449 cgraph_lower_function (struct cgraph_node *node)
450 {
451 if (node->lowered)
452 return;
453 tree_lowering_passes (node->decl);
454 node->lowered = true;
455 }
456
457 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
458 logic in effect. If NESTED is true, then our caller cannot stand to have
459 the garbage collector run at the moment. We would need to either create
460 a new GC context, or just not compile right now. */
461
462 void
463 cgraph_finalize_function (tree decl, bool nested)
464 {
465 struct cgraph_node *node = cgraph_node (decl);
466
467 if (node->local.finalized)
468 cgraph_reset_node (node);
469
470 notice_global_symbol (decl);
471 node->decl = decl;
472 node->local.finalized = true;
473 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
474 if (node->nested)
475 lower_nested_functions (decl);
476 gcc_assert (!node->nested);
477
478 /* If not unit at a time, then we need to create the call graph
479 now, so that called functions can be queued and emitted now. */
480 if (!flag_unit_at_a_time)
481 {
482 cgraph_analyze_function (node);
483 cgraph_decide_inlining_incrementally (node, false);
484 }
485
486 if (decide_is_function_needed (node, decl))
487 cgraph_mark_needed_node (node);
488
489 /* Since we reclaim unreachable nodes at the end of every language
490 level unit, we need to be conservative about possible entry points
491 there. */
492 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
493 cgraph_mark_reachable_node (node);
494
495 /* If not unit at a time, go ahead and emit everything we've found
496 to be reachable at this time. */
497 if (!nested)
498 {
499 if (!cgraph_assemble_pending_functions ())
500 ggc_collect ();
501 }
502
503 /* If we've not yet emitted decl, tell the debug info about it. */
504 if (!TREE_ASM_WRITTEN (decl))
505 (*debug_hooks->deferred_inline_function) (decl);
506
507 /* Possibly warn about unused parameters. */
508 if (warn_unused_parameter)
509 do_warn_unused_parameter (decl);
510 }
511
512 /* Walk tree and record all calls. Called via walk_tree. */
513 static tree
514 record_reference (tree *tp, int *walk_subtrees, void *data)
515 {
516 tree t = *tp;
517
518 switch (TREE_CODE (t))
519 {
520 case VAR_DECL:
521 /* ??? Really, we should mark this decl as *potentially* referenced
522 by this function and re-examine whether the decl is actually used
523 after rtl has been generated. */
524 if (TREE_STATIC (t) || DECL_EXTERNAL (t))
525 {
526 cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
527 if (lang_hooks.callgraph.analyze_expr)
528 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
529 data);
530 }
531 break;
532
533 case FDESC_EXPR:
534 case ADDR_EXPR:
535 if (flag_unit_at_a_time)
536 {
537 /* Record dereferences to the functions. This makes the
538 functions reachable unconditionally. */
539 tree decl = TREE_OPERAND (*tp, 0);
540 if (TREE_CODE (decl) == FUNCTION_DECL)
541 cgraph_mark_needed_node (cgraph_node (decl));
542 }
543 break;
544
545 default:
      /* Save some cycles by not walking types and declarations, as we
	 won't find anything useful there anyway.  */
548 if (IS_TYPE_OR_DECL_P (*tp))
549 {
550 *walk_subtrees = 0;
551 break;
552 }
553
554 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
555 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
556 break;
557 }
558
559 return NULL;
560 }
561
562 /* Create cgraph edges for function calls inside BODY from NODE. */
563
564 static void
565 cgraph_create_edges (struct cgraph_node *node, tree body)
566 {
567 basic_block bb;
568
569 struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
570 block_stmt_iterator bsi;
571 tree step;
572 visited_nodes = pointer_set_create ();
573
574 /* Reach the trees by walking over the CFG, and note the
575 enclosing basic-blocks in the call edges. */
576 FOR_EACH_BB_FN (bb, this_cfun)
577 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
578 {
579 tree stmt = bsi_stmt (bsi);
580 tree call = get_call_expr_in (stmt);
581 tree decl;
582
583 if (call && (decl = get_callee_fndecl (call)))
584 {
585 cgraph_create_edge (node, cgraph_node (decl), stmt,
586 bb->count,
587 bb->loop_depth);
588 walk_tree (&TREE_OPERAND (call, 1),
589 record_reference, node, visited_nodes);
590 if (TREE_CODE (stmt) == MODIFY_EXPR)
591 walk_tree (&TREE_OPERAND (stmt, 0),
592 record_reference, node, visited_nodes);
593 }
594 else
595 walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
596 }
597
598 /* Look for initializers of constant variables and private statics. */
599 for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
600 step;
601 step = TREE_CHAIN (step))
602 {
603 tree decl = TREE_VALUE (step);
604 if (TREE_CODE (decl) == VAR_DECL
605 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
606 && flag_unit_at_a_time)
607 cgraph_varpool_finalize_decl (decl);
608 else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
609 walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
610 }
611
612 pointer_set_destroy (visited_nodes);
613 visited_nodes = NULL;
614 }
615
/* Give initial reasons why inlining would fail.  These get
   either NULLified or, usually, overwritten by a more precise reason
   later.  */
619 static void
620 initialize_inline_failed (struct cgraph_node *node)
621 {
622 struct cgraph_edge *e;
623
624 for (e = node->callers; e; e = e->next_caller)
625 {
626 gcc_assert (!e->callee->global.inlined_to);
627 gcc_assert (e->inline_failed);
628 if (node->local.redefined_extern_inline)
629 e->inline_failed = N_("redefined extern inline functions are not "
630 "considered for inlining");
631 else if (!node->local.inlinable)
632 e->inline_failed = N_("function not inlinable");
633 else
634 e->inline_failed = N_("function not considered for inlining");
635 }
636 }
637
/* Rebuild call edges from the current function after passes that are not
   aware of cgraph updating.  */
640 static unsigned int
641 rebuild_cgraph_edges (void)
642 {
643 basic_block bb;
644 struct cgraph_node *node = cgraph_node (current_function_decl);
645 block_stmt_iterator bsi;
646
647 cgraph_node_remove_callees (node);
648
649 node->count = ENTRY_BLOCK_PTR->count;
650
651 FOR_EACH_BB (bb)
652 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
653 {
654 tree stmt = bsi_stmt (bsi);
655 tree call = get_call_expr_in (stmt);
656 tree decl;
657
658 if (call && (decl = get_callee_fndecl (call)))
659 cgraph_create_edge (node, cgraph_node (decl), stmt,
660 bb->count,
661 bb->loop_depth);
662 }
663 initialize_inline_failed (node);
664 gcc_assert (!node->global.inlined_to);
665 return 0;
666 }
667
668 struct tree_opt_pass pass_rebuild_cgraph_edges =
669 {
670 NULL, /* name */
671 NULL, /* gate */
672 rebuild_cgraph_edges, /* execute */
673 NULL, /* sub */
674 NULL, /* next */
675 0, /* static_pass_number */
676 0, /* tv_id */
677 PROP_cfg, /* properties_required */
678 0, /* properties_provided */
679 0, /* properties_destroyed */
680 0, /* todo_flags_start */
681 0, /* todo_flags_finish */
682 0 /* letter */
683 };
684
/* Verify the internal consistency of cgraph node NODE.  */
686 void
687 verify_cgraph_node (struct cgraph_node *node)
688 {
689 struct cgraph_edge *e;
690 struct cgraph_node *main_clone;
691 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
692 basic_block this_block;
693 block_stmt_iterator bsi;
694 bool error_found = false;
695
696 timevar_push (TV_CGRAPH_VERIFY);
697 for (e = node->callees; e; e = e->next_callee)
698 if (e->aux)
699 {
700 error ("aux field set for edge %s->%s",
701 cgraph_node_name (e->caller), cgraph_node_name (e->callee));
702 error_found = true;
703 }
704 if (node->count < 0)
705 {
706 error ("Execution count is negative");
707 error_found = true;
708 }
709 for (e = node->callers; e; e = e->next_caller)
710 {
711 if (e->count < 0)
712 {
713 error ("caller edge count is negative");
714 error_found = true;
715 }
716 if (!e->inline_failed)
717 {
718 if (node->global.inlined_to
719 != (e->caller->global.inlined_to
720 ? e->caller->global.inlined_to : e->caller))
721 {
722 error ("inlined_to pointer is wrong");
723 error_found = true;
724 }
725 if (node->callers->next_caller)
726 {
727 error ("multiple inline callers");
728 error_found = true;
729 }
730 }
731 else
732 if (node->global.inlined_to)
733 {
734 error ("inlined_to pointer set for noninline callers");
735 error_found = true;
736 }
737 }
738 if (!node->callers && node->global.inlined_to)
739 {
740 error ("inlined_to pointer is set but no predecessors found");
741 error_found = true;
742 }
743 if (node->global.inlined_to == node)
744 {
745 error ("inlined_to pointer refers to itself");
746 error_found = true;
747 }
748
749 for (main_clone = cgraph_node (node->decl); main_clone;
750 main_clone = main_clone->next_clone)
751 if (main_clone == node)
752 break;
753 if (!cgraph_node (node->decl))
754 {
755 error ("node not found in cgraph_hash");
756 error_found = true;
757 }
758
759 if (node->analyzed
760 && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
761 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
762 {
763 if (this_cfun->cfg)
764 {
765 /* The nodes we're interested in are never shared, so walk
766 the tree ignoring duplicates. */
767 visited_nodes = pointer_set_create ();
768 /* Reach the trees by walking over the CFG, and note the
769 enclosing basic-blocks in the call edges. */
770 FOR_EACH_BB_FN (this_block, this_cfun)
771 for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
772 {
773 tree stmt = bsi_stmt (bsi);
774 tree call = get_call_expr_in (stmt);
775 tree decl;
776 if (call && (decl = get_callee_fndecl (call)))
777 {
778 struct cgraph_edge *e = cgraph_edge (node, stmt);
779 if (e)
780 {
781 if (e->aux)
782 {
783 error ("shared call_stmt:");
784 debug_generic_stmt (stmt);
785 error_found = true;
786 }
787 if (e->callee->decl != cgraph_node (decl)->decl
788 && e->inline_failed)
789 {
790 error ("edge points to wrong declaration:");
791 debug_tree (e->callee->decl);
792 fprintf (stderr," Instead of:");
793 debug_tree (decl);
794 }
795 e->aux = (void *)1;
796 }
797 else
798 {
799 error ("missing callgraph edge for call stmt:");
800 debug_generic_stmt (stmt);
801 error_found = true;
802 }
803 }
804 }
805 pointer_set_destroy (visited_nodes);
806 visited_nodes = NULL;
807 }
808 else
809 /* No CFG available?! */
810 gcc_unreachable ();
811
812 for (e = node->callees; e; e = e->next_callee)
813 {
814 if (!e->aux)
815 {
816 error ("edge %s->%s has no corresponding call_stmt",
817 cgraph_node_name (e->caller),
818 cgraph_node_name (e->callee));
819 debug_generic_stmt (e->call_stmt);
820 error_found = true;
821 }
822 e->aux = 0;
823 }
824 }
825 if (error_found)
826 {
827 dump_cgraph_node (stderr, node);
828 internal_error ("verify_cgraph_node failed");
829 }
830 timevar_pop (TV_CGRAPH_VERIFY);
831 }
832
833 /* Verify whole cgraph structure. */
834 void
835 verify_cgraph (void)
836 {
837 struct cgraph_node *node;
838
839 if (sorrycount || errorcount)
840 return;
841
842 for (node = cgraph_nodes; node; node = node->next)
843 verify_cgraph_node (node);
844 }
845
846 /* Output one variable, if necessary. Return whether we output it. */
847 static bool
848 cgraph_varpool_assemble_decl (struct cgraph_varpool_node *node)
849 {
850 tree decl = node->decl;
851
852 if (!TREE_ASM_WRITTEN (decl)
853 && !node->alias
854 && !DECL_EXTERNAL (decl)
855 && (TREE_CODE (decl) != VAR_DECL || !DECL_HAS_VALUE_EXPR_P (decl)))
856 {
857 assemble_variable (decl, 0, 1, 0);
858 /* Local static variables are never seen by check_global_declarations
859 so we need to output debug info by hand. */
860 if (DECL_CONTEXT (decl)
861 && (TREE_CODE (DECL_CONTEXT (decl)) == BLOCK
862 || TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
863 && errorcount == 0 && sorrycount == 0)
864 {
865 timevar_push (TV_SYMOUT);
866 (*debug_hooks->global_decl) (decl);
867 timevar_pop (TV_SYMOUT);
868 }
869 return true;
870 }
871
872 return false;
873 }
874
875 /* Output all variables enqueued to be assembled. */
876 bool
877 cgraph_varpool_assemble_pending_decls (void)
878 {
879 bool changed = false;
880
881 if (errorcount || sorrycount)
882 return false;
883
  /* EH might mark decls as needed during expansion.  This should be safe
     since we don't create references to new functions, but it should not
     be used elsewhere.  */
887 cgraph_varpool_analyze_pending_decls ();
888
889 while (cgraph_varpool_nodes_queue)
890 {
891 struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
892
893 cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
894 if (cgraph_varpool_assemble_decl (node))
895 changed = true;
896 node->next_needed = NULL;
897 }
898 return changed;
899 }
900
901 /* Output all asm statements we have stored up to be output. */
902
903 static void
904 cgraph_output_pending_asms (void)
905 {
906 struct cgraph_asm_node *can;
907
908 if (errorcount || sorrycount)
909 return;
910
911 for (can = cgraph_asm_nodes; can; can = can->next)
912 assemble_asm (can->asm_str);
913 cgraph_asm_nodes = NULL;
914 }
915
916 /* Analyze the function scheduled to be output. */
917 void
918 cgraph_analyze_function (struct cgraph_node *node)
919 {
920 tree decl = node->decl;
921
922 current_function_decl = decl;
923 push_cfun (DECL_STRUCT_FUNCTION (decl));
924 cgraph_lower_function (node);
925
926 /* First kill forward declaration so reverse inlining works properly. */
927 cgraph_create_edges (node, decl);
928
929 node->local.inlinable = tree_inlinable_function_p (decl);
930 node->local.self_insns = estimate_num_insns (decl);
931 if (node->local.inlinable)
932 node->local.disregard_inline_limits
933 = lang_hooks.tree_inlining.disregard_inline_limits (decl);
934 initialize_inline_failed (node);
935 if (flag_really_no_inline && !node->local.disregard_inline_limits)
936 node->local.inlinable = 0;
937 /* Inlining characteristics are maintained by the cgraph_mark_inline. */
938 node->global.insns = node->local.self_insns;
939
940 node->analyzed = true;
941 pop_cfun ();
942 current_function_decl = NULL;
943 }
944
945 /* Look for externally_visible and used attributes and mark cgraph nodes
946 accordingly.
947
948 We cannot mark the nodes at the point the attributes are processed (in
949 handle_*_attribute) because the copy of the declarations available at that
950 point may not be canonical. For example, in:
951
952 void f();
953 void f() __attribute__((used));
954
955 the declaration we see in handle_used_attribute will be the second
956 declaration -- but the front end will subsequently merge that declaration
957 with the original declaration and discard the second declaration.
958
959 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
960
961 void f() {}
962 void f() __attribute__((externally_visible));
963
964 is valid.
965
966 So, we walk the nodes at the end of the translation unit, applying the
967 attributes at that point. */
968
969 static void
970 process_function_and_variable_attributes (struct cgraph_node *first,
971 struct cgraph_varpool_node *first_var)
972 {
973 struct cgraph_node *node;
974 struct cgraph_varpool_node *vnode;
975
976 for (node = cgraph_nodes; node != first; node = node->next)
977 {
978 tree decl = node->decl;
979 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
980 {
981 mark_decl_referenced (decl);
982 if (node->local.finalized)
983 cgraph_mark_needed_node (node);
984 }
985 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
986 {
987 if (node->local.finalized)
988 cgraph_mark_needed_node (node);
989 node->externally_visible = true;
990 }
991 }
992 for (vnode = cgraph_varpool_nodes; vnode != first_var; vnode = vnode->next)
993 {
994 tree decl = vnode->decl;
995 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
996 {
997 mark_decl_referenced (decl);
998 if (vnode->finalized)
999 cgraph_varpool_mark_needed_node (vnode);
1000 }
1001 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
1002 {
1003 if (vnode->finalized)
1004 cgraph_varpool_mark_needed_node (vnode);
1005 vnode->externally_visible = true;
1006 }
1007 }
1008 }
1009
1010 /* Analyze the whole compilation unit once it is parsed completely. */
1011
1012 void
1013 cgraph_finalize_compilation_unit (void)
1014 {
1015 struct cgraph_node *node, *next;
1016 /* Keep track of already processed nodes when called multiple times for
1017 intermodule optimization. */
1018 static struct cgraph_node *first_analyzed;
1019 static struct cgraph_varpool_node *first_analyzed_var;
1020
1021 if (errorcount || sorrycount)
1022 return;
1023
1024 finish_aliases_1 ();
1025
1026 if (!flag_unit_at_a_time)
1027 {
1028 cgraph_output_pending_asms ();
1029 cgraph_assemble_pending_functions ();
1030 return;
1031 }
1032
1033 if (!quiet_flag)
1034 {
1035 fprintf (stderr, "\nAnalyzing compilation unit");
1036 fflush (stderr);
1037 }
1038
1039 timevar_push (TV_CGRAPH);
1040 process_function_and_variable_attributes (first_analyzed, first_analyzed_var);
1041 cgraph_varpool_analyze_pending_decls ();
1042 if (cgraph_dump_file)
1043 {
1044 fprintf (cgraph_dump_file, "Initial entry points:");
1045 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1046 if (node->needed && DECL_SAVED_TREE (node->decl))
1047 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1048 fprintf (cgraph_dump_file, "\n");
1049 }
1050
1051 /* Propagate reachability flag and lower representation of all reachable
1052 functions. In the future, lowering will introduce new functions and
1053 new entry points on the way (by template instantiation and virtual
1054 method table generation for instance). */
1055 while (cgraph_nodes_queue)
1056 {
1057 struct cgraph_edge *edge;
1058 tree decl = cgraph_nodes_queue->decl;
1059
1060 node = cgraph_nodes_queue;
1061 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
1062 node->next_needed = NULL;
1063
      /* ??? It is possible to create an extern inline function and later
	 use the weak alias attribute to kill its body.  See
	 gcc.c-torture/compile/20011119-1.c  */
1067 if (!DECL_SAVED_TREE (decl))
1068 {
1069 cgraph_reset_node (node);
1070 continue;
1071 }
1072
1073 gcc_assert (!node->analyzed && node->reachable);
1074 gcc_assert (DECL_SAVED_TREE (decl));
1075
1076 cgraph_analyze_function (node);
1077
1078 for (edge = node->callees; edge; edge = edge->next_callee)
1079 if (!edge->callee->reachable)
1080 cgraph_mark_reachable_node (edge->callee);
1081
1082 cgraph_varpool_analyze_pending_decls ();
1083 }
1084
1085 /* Collect entry points to the unit. */
1086
1087 if (cgraph_dump_file)
1088 {
1089 fprintf (cgraph_dump_file, "Unit entry points:");
1090 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1091 if (node->needed && DECL_SAVED_TREE (node->decl))
1092 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1093 fprintf (cgraph_dump_file, "\n\nInitial ");
1094 dump_cgraph (cgraph_dump_file);
1095 }
1096
1097 if (cgraph_dump_file)
1098 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1099
1100 for (node = cgraph_nodes; node != first_analyzed; node = next)
1101 {
1102 tree decl = node->decl;
1103 next = node->next;
1104
1105 if (node->local.finalized && !DECL_SAVED_TREE (decl))
1106 cgraph_reset_node (node);
1107
1108 if (!node->reachable && DECL_SAVED_TREE (decl))
1109 {
1110 if (cgraph_dump_file)
1111 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1112 cgraph_remove_node (node);
1113 continue;
1114 }
1115 else
1116 node->next_needed = NULL;
1117 gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
1118 gcc_assert (node->analyzed == node->local.finalized);
1119 }
1120 if (cgraph_dump_file)
1121 {
1122 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1123 dump_cgraph (cgraph_dump_file);
1124 }
1125 first_analyzed = cgraph_nodes;
1126 first_analyzed_var = cgraph_varpool_nodes;
1127 ggc_collect ();
1128 timevar_pop (TV_CGRAPH);
1129 }
1130 /* Figure out what functions we want to assemble. */
1131
1132 static void
1133 cgraph_mark_functions_to_output (void)
1134 {
1135 struct cgraph_node *node;
1136
1137 for (node = cgraph_nodes; node; node = node->next)
1138 {
1139 tree decl = node->decl;
1140 struct cgraph_edge *e;
1141
1142 gcc_assert (!node->output);
1143
1144 for (e = node->callers; e; e = e->next_caller)
1145 if (e->inline_failed)
1146 break;
1147
1148 /* We need to output all local functions that are used and not
1149 always inlined, as well as those that are reachable from
1150 outside the current compilation unit. */
1151 if (DECL_SAVED_TREE (decl)
1152 && !node->global.inlined_to
1153 && (node->needed
1154 || (e && node->reachable))
1155 && !TREE_ASM_WRITTEN (decl)
1156 && !DECL_EXTERNAL (decl))
1157 node->output = 1;
1158 else
1159 {
1160 /* We should've reclaimed all functions that are not needed. */
1161 #ifdef ENABLE_CHECKING
1162 if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
1163 && !DECL_EXTERNAL (decl))
1164 {
1165 dump_cgraph_node (stderr, node);
1166 internal_error ("failed to reclaim unneeded function");
1167 }
1168 #endif
1169 gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
1170 || DECL_EXTERNAL (decl));
1171
1172 }
1173
1174 }
1175 }
1176
1177 /* Expand function specified by NODE. */
1178
1179 static void
1180 cgraph_expand_function (struct cgraph_node *node)
1181 {
1182 tree decl = node->decl;
1183
1184 /* We ought to not compile any inline clones. */
1185 gcc_assert (!node->global.inlined_to);
1186
1187 if (flag_unit_at_a_time)
1188 announce_function (decl);
1189
1190 cgraph_lower_function (node);
1191
1192 /* Generate RTL for the body of DECL. */
1193 lang_hooks.callgraph.expand_function (decl);
1194
1195 /* Make sure that BE didn't give up on compiling. */
1196 /* ??? Can happen with nested function of extern inline. */
1197 gcc_assert (TREE_ASM_WRITTEN (node->decl));
1198
1199 current_function_decl = NULL;
1200 if (!cgraph_preserve_function_body_p (node->decl))
1201 {
1202 DECL_SAVED_TREE (node->decl) = NULL;
1203 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1204 DECL_INITIAL (node->decl) = error_mark_node;
1205 /* Eliminate all call edges. This is important so the call_expr no longer
1206 points to the dead function body. */
1207 cgraph_node_remove_callees (node);
1208 }
1209
1210 cgraph_function_flags_ready = true;
1211 }
1212
/* Return true when the call on edge E is to be inlined; store the
   inline_failed reason (if any) in *REASON.  */
1214
1215 bool
1216 cgraph_inline_p (struct cgraph_edge *e, const char **reason)
1217 {
1218 *reason = e->inline_failed;
1219 return !e->inline_failed;
1220 }
1221
1222
1223
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so that a function is output
   when all of its called functions are already assembled, to allow data
   to be propagated across the callgraph.  Use a stack to get a smaller
   distance between a function and its callees (later we may choose to use
   a more sophisticated algorithm for function reordering; we will likely
   want to use subsections to make the output functions appear in top-down
   order).  */
1233
1234 static void
1235 cgraph_expand_all_functions (void)
1236 {
1237 struct cgraph_node *node;
1238 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1239 int order_pos = 0, new_order_pos = 0;
1240 int i;
1241
1242 order_pos = cgraph_postorder (order);
1243 gcc_assert (order_pos == cgraph_n_nodes);
1244
1245 /* Garbage collector may remove inline clones we eliminate during
1246 optimization. So we must be sure to not reference them. */
1247 for (i = 0; i < order_pos; i++)
1248 if (order[i]->output)
1249 order[new_order_pos++] = order[i];
1250
1251 for (i = new_order_pos - 1; i >= 0; i--)
1252 {
1253 node = order[i];
1254 if (node->output)
1255 {
1256 gcc_assert (node->reachable);
1257 node->output = 0;
1258 cgraph_expand_function (node);
1259 }
1260 }
1261
1262 free (order);
1263
  /* Process CGRAPH_EXPAND_QUEUE; these are functions created during
     the expansion process.  Note that this queue may grow as it is
     being processed, as the new functions may generate new ones.  */
1267 while (cgraph_expand_queue)
1268 {
1269 node = cgraph_expand_queue;
1270 cgraph_expand_queue = cgraph_expand_queue->next_needed;
1271 node->next_needed = NULL;
1272 node->output = 0;
1273 node->lowered = DECL_STRUCT_FUNCTION (node->decl)->cfg != NULL;
1274 cgraph_expand_function (node);
1275 }
1276 }
1277
1278 /* This is used to sort the node types by the cgraph order number. */
1279
1280 struct cgraph_order_sort
1281 {
1282 enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
1283 union
1284 {
1285 struct cgraph_node *f;
1286 struct cgraph_varpool_node *v;
1287 struct cgraph_asm_node *a;
1288 } u;
1289 };
1290
1291 /* Output all functions, variables, and asm statements in the order
1292 according to their order fields, which is the order in which they
1293 appeared in the file. This implements -fno-toplevel-reorder. In
1294 this mode we may output functions and variables which don't really
1295 need to be output. */
1296
1297 static void
1298 cgraph_output_in_order (void)
1299 {
1300 int max;
1301 size_t size;
1302 struct cgraph_order_sort *nodes;
1303 int i;
1304 struct cgraph_node *pf;
1305 struct cgraph_varpool_node *pv;
1306 struct cgraph_asm_node *pa;
1307
1308 max = cgraph_order;
1309 size = max * sizeof (struct cgraph_order_sort);
1310 nodes = (struct cgraph_order_sort *) alloca (size);
1311 memset (nodes, 0, size);
1312
1313 cgraph_varpool_analyze_pending_decls ();
1314
1315 for (pf = cgraph_nodes; pf; pf = pf->next)
1316 {
1317 if (pf->output)
1318 {
1319 i = pf->order;
1320 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1321 nodes[i].kind = ORDER_FUNCTION;
1322 nodes[i].u.f = pf;
1323 }
1324 }
1325
1326 for (pv = cgraph_varpool_nodes_queue; pv; pv = pv->next_needed)
1327 {
1328 i = pv->order;
1329 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1330 nodes[i].kind = ORDER_VAR;
1331 nodes[i].u.v = pv;
1332 }
1333
1334 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1335 {
1336 i = pa->order;
1337 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1338 nodes[i].kind = ORDER_ASM;
1339 nodes[i].u.a = pa;
1340 }
1341
1342 for (i = 0; i < max; ++i)
1343 {
1344 switch (nodes[i].kind)
1345 {
1346 case ORDER_FUNCTION:
1347 nodes[i].u.f->output = 0;
1348 cgraph_expand_function (nodes[i].u.f);
1349 break;
1350
1351 case ORDER_VAR:
1352 cgraph_varpool_assemble_decl (nodes[i].u.v);
1353 break;
1354
1355 case ORDER_ASM:
1356 assemble_asm (nodes[i].u.a->asm_str);
1357 break;
1358
1359 case ORDER_UNDEFINED:
1360 break;
1361
1362 default:
1363 gcc_unreachable ();
1364 }
1365 }
1366
1367 cgraph_asm_nodes = NULL;
1368 }
1369
1370 /* Mark visibility of all functions.
1371
1372 A local function is one whose calls can occur only in the current
1373 compilation unit and all its calls are explicit, so we can change
1374 its calling convention. We simply mark all static functions whose
1375 address is not taken as local.
1376
   We also change the TREE_PUBLIC flag of all declarations that are public
   from the language's point of view but whose default we want to override,
   via visibilities, for the back end's point of view.  */
1380
1381 static void
1382 cgraph_function_and_variable_visibility (void)
1383 {
1384 struct cgraph_node *node;
1385 struct cgraph_varpool_node *vnode;
1386
1387 for (node = cgraph_nodes; node; node = node->next)
1388 {
1389 if (node->reachable
1390 && (DECL_COMDAT (node->decl)
1391 || (!flag_whole_program
1392 && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
1393 node->local.externally_visible = true;
1394 if (!node->local.externally_visible && node->analyzed
1395 && !DECL_EXTERNAL (node->decl))
1396 {
1397 gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
1398 TREE_PUBLIC (node->decl) = 0;
1399 }
1400 node->local.local = (!node->needed
1401 && node->analyzed
1402 && !DECL_EXTERNAL (node->decl)
1403 && !node->local.externally_visible);
1404 }
1405 for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
1406 {
1407 if (vnode->needed
1408 && !flag_whole_program
1409 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
1410 vnode->externally_visible = 1;
1411 if (!vnode->externally_visible)
1412 {
1413 gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
1414 TREE_PUBLIC (vnode->decl) = 0;
1415 }
1416 gcc_assert (TREE_STATIC (vnode->decl));
1417 }
1418
  /* Because we have to be conservative on the boundaries of source-level
     units, it is possible that we marked some functions as reachable just
     because they might be used later via external linkage, but after
     making them local they are really unreachable now.  */
1424 cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
1425
1426 if (cgraph_dump_file)
1427 {
1428 fprintf (cgraph_dump_file, "\nMarking local functions:");
1429 for (node = cgraph_nodes; node; node = node->next)
1430 if (node->local.local)
1431 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1432 fprintf (cgraph_dump_file, "\n\n");
1433 fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
1434 for (node = cgraph_nodes; node; node = node->next)
1435 if (node->local.externally_visible)
1436 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1437 fprintf (cgraph_dump_file, "\n\n");
1438 }
1439 cgraph_function_flags_ready = true;
1440 }
1441
1442 /* Return true when function body of DECL still needs to be kept around
1443 for later re-use. */
1444 bool
1445 cgraph_preserve_function_body_p (tree decl)
1446 {
1447 struct cgraph_node *node;
1448 if (!cgraph_global_info_ready)
1449 return (DECL_INLINE (decl) && !flag_really_no_inline);
1450 /* Look if there is any clone around. */
1451 for (node = cgraph_node (decl); node; node = node->next_clone)
1452 if (node->global.inlined_to)
1453 return true;
1454 return false;
1455 }
1456
1457 static void
1458 ipa_passes (void)
1459 {
1460 cfun = NULL;
1461 tree_register_cfg_hooks ();
1462 bitmap_obstack_initialize (NULL);
1463 execute_ipa_pass_list (all_ipa_passes);
1464 bitmap_obstack_release (NULL);
1465 }
1466
1467 /* Perform simple optimizations based on callgraph. */
1468
1469 void
1470 cgraph_optimize (void)
1471 {
1472 if (errorcount || sorrycount)
1473 return;
1474
1475 #ifdef ENABLE_CHECKING
1476 verify_cgraph ();
1477 #endif
1478 if (!flag_unit_at_a_time)
1479 {
1480 cgraph_output_pending_asms ();
1481 cgraph_varpool_assemble_pending_decls ();
1482 return;
1483 }
1484
1485 process_pending_assemble_externals ();
1486
  /* The front end may output common variables after the unit has been
     finalized.  It is safe to deal with them here as they are always
     zero initialized.  */
1489 cgraph_varpool_analyze_pending_decls ();
1490
1491 timevar_push (TV_CGRAPHOPT);
1492 if (!quiet_flag)
1493 fprintf (stderr, "Performing intraprocedural optimizations\n");
1494
1495 cgraph_function_and_variable_visibility ();
1496 if (cgraph_dump_file)
1497 {
1498 fprintf (cgraph_dump_file, "Marked ");
1499 dump_cgraph (cgraph_dump_file);
1500 }
1501
  /* Don't run the IPA passes if there were any errors or sorry messages.  */
1503 if (errorcount == 0 && sorrycount == 0)
1504 ipa_passes ();
1505
  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
1508 cgraph_remove_unreachable_nodes (false, dump_file);
1509 cgraph_global_info_ready = true;
1510 if (cgraph_dump_file)
1511 {
1512 fprintf (cgraph_dump_file, "Optimized ");
1513 dump_cgraph (cgraph_dump_file);
1514 dump_varpool (cgraph_dump_file);
1515 }
1516 timevar_pop (TV_CGRAPHOPT);
1517
1518 /* Output everything. */
1519 if (!quiet_flag)
1520 fprintf (stderr, "Assembling functions:\n");
1521 #ifdef ENABLE_CHECKING
1522 verify_cgraph ();
1523 #endif
1524
1525 cgraph_mark_functions_to_output ();
1526
1527 if (!flag_toplevel_reorder)
1528 cgraph_output_in_order ();
1529 else
1530 {
1531 cgraph_output_pending_asms ();
1532
1533 cgraph_expand_all_functions ();
1534 cgraph_varpool_remove_unreferenced_decls ();
1535
1536 cgraph_varpool_assemble_pending_decls ();
1537 }
1538
1539 if (cgraph_dump_file)
1540 {
1541 fprintf (cgraph_dump_file, "\nFinal ");
1542 dump_cgraph (cgraph_dump_file);
1543 }
1544 #ifdef ENABLE_CHECKING
1545 verify_cgraph ();
1546 /* Double check that all inline clones are gone and that all
1547 function bodies have been released from memory. */
1548 if (flag_unit_at_a_time
1549 && !(sorrycount || errorcount))
1550 {
1551 struct cgraph_node *node;
1552 bool error_found = false;
1553
1554 for (node = cgraph_nodes; node; node = node->next)
1555 if (node->analyzed
1556 && (node->global.inlined_to
1557 || DECL_SAVED_TREE (node->decl)))
1558 {
1559 error_found = true;
1560 dump_cgraph_node (stderr, node);
1561 }
1562 if (error_found)
1563 internal_error ("nodes with no released memory found");
1564 }
1565 #endif
1566 }
1567
1568 /* Generate and emit a static constructor or destructor. WHICH must be
1569 one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
1570 GENERIC statements. */
1571
1572 void
1573 cgraph_build_static_cdtor (char which, tree body, int priority)
1574 {
1575 static int counter = 0;
1576 char which_buf[16];
1577 tree decl, name, resdecl;
1578
1579 sprintf (which_buf, "%c_%d", which, counter++);
1580 name = get_file_function_name_long (which_buf);
1581
1582 decl = build_decl (FUNCTION_DECL, name,
1583 build_function_type (void_type_node, void_list_node));
1584 current_function_decl = decl;
1585
1586 resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1587 DECL_ARTIFICIAL (resdecl) = 1;
1588 DECL_IGNORED_P (resdecl) = 1;
1589 DECL_RESULT (decl) = resdecl;
1590
1591 allocate_struct_function (decl);
1592
1593 TREE_STATIC (decl) = 1;
1594 TREE_USED (decl) = 1;
1595 DECL_ARTIFICIAL (decl) = 1;
1596 DECL_IGNORED_P (decl) = 1;
1597 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1598 DECL_SAVED_TREE (decl) = body;
1599 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1600 DECL_UNINLINABLE (decl) = 1;
1601
1602 DECL_INITIAL (decl) = make_node (BLOCK);
1603 TREE_USED (DECL_INITIAL (decl)) = 1;
1604
1605 DECL_SOURCE_LOCATION (decl) = input_location;
1606 cfun->function_end_locus = input_location;
1607
1608 switch (which)
1609 {
1610 case 'I':
1611 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1612 break;
1613 case 'D':
1614 DECL_STATIC_DESTRUCTOR (decl) = 1;
1615 break;
1616 default:
1617 gcc_unreachable ();
1618 }
1619
1620 gimplify_function_tree (decl);
1621
1622 /* ??? We will get called LATE in the compilation process. */
1623 if (cgraph_global_info_ready)
1624 {
1625 tree_lowering_passes (decl);
1626 tree_rest_of_compilation (decl);
1627 }
1628 else
1629 cgraph_finalize_function (decl, 0);
1630
1631 if (targetm.have_ctors_dtors)
1632 {
1633 void (*fn) (rtx, int);
1634
1635 if (which == 'I')
1636 fn = targetm.asm_out.constructor;
1637 else
1638 fn = targetm.asm_out.destructor;
1639 fn (XEXP (DECL_RTL (decl), 0), priority);
1640 }
1641 }
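
/* As an illustration: a pass that wants some hypothetical runtime hook
   called before main could build such a constructor along these lines
   (my_init_fndecl and the priority choice are placeholders; the calls to
   alloc_stmt_list, append_to_statement_list, build_function_call_expr and
   cgraph_build_static_cdtor are the real interfaces):

       tree body = alloc_stmt_list ();
       tree call = build_function_call_expr (my_init_fndecl, NULL_TREE);

       append_to_statement_list (call, &body);
       cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);

   The body is gimplified and compiled like any other function; when the
   target has no native constructor support, the function is left public
   so that it can still be picked up at link time (e.g. by collect2).  */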
1642
1643 void
1644 init_cgraph (void)
1645 {
1646 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1647 }
1648
/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call NEW_VERSION.  */
1652
1653 static void
1654 update_call_expr (struct cgraph_node *new_version)
1655 {
1656 struct cgraph_edge *e;
1657
1658 gcc_assert (new_version);
1659 for (e = new_version->callers; e; e = e->next_caller)
1660 /* Update the call expr on the edges
1661 to call the new version. */
1662 TREE_OPERAND (TREE_OPERAND (get_call_expr_in (e->call_stmt), 0), 0) = new_version->decl;
1663 }
1664
1665
/* Create a new cgraph node which is the new version of
   the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.  */
1672
1673 static struct cgraph_node *
1674 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1675 tree new_decl,
1676 VEC(cgraph_edge_p,heap) *redirect_callers)
1677 {
1678 struct cgraph_node *new_version;
1679 struct cgraph_edge *e, *new_e;
1680 struct cgraph_edge *next_callee;
1681 unsigned i;
1682
1683 gcc_assert (old_version);
1684
1685 new_version = cgraph_node (new_decl);
1686
1687 new_version->analyzed = true;
1688 new_version->local = old_version->local;
1689 new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
1691 new_version->reachable = true;
1692 new_version->count = old_version->count;
1693
1694 /* Clone the old node callees. Recursive calls are
1695 also cloned. */
1696 for (e = old_version->callees;e; e=e->next_callee)
1697 {
1698 new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->loop_nest, true);
1699 new_e->count = e->count;
1700 }
1701 /* Fix recursive calls.
1702 If OLD_VERSION has a recursive call after the
1703 previous edge cloning, the new version will have an edge
1704 pointing to the old version, which is wrong;
1705 Redirect it to point to the new version. */
1706 for (e = new_version->callees ; e; e = next_callee)
1707 {
1708 next_callee = e->next_callee;
1709 if (e->callee == old_version)
1710 cgraph_redirect_edge_callee (e, new_version);
1711
1712 if (!next_callee)
1713 break;
1714 }
1715 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1716 {
1717 /* Redirect calls to the old version node to point to its new
1718 version. */
1719 cgraph_redirect_edge_callee (e, new_version);
1720 }
1721
1722 return new_version;
1723 }
1724
1725 /* Perform function versioning.
1726 Function versioning includes copying of the tree and
1727 a callgraph update (creating a new cgraph node and updating
1728 its callees and callers).
1729
1730 REDIRECT_CALLERS varray includes the edges to be redirected
1731 to the new version.
1732
1733 TREE_MAP is a mapping of tree nodes we want to replace with
1734 new ones (according to results of prior analysis).
1735 OLD_VERSION_NODE is the node that is versioned.
1736 It returns the new version's cgraph node. */
1737
1738 struct cgraph_node *
1739 cgraph_function_versioning (struct cgraph_node *old_version_node,
1740 VEC(cgraph_edge_p,heap) *redirect_callers,
1741 varray_type tree_map)
1742 {
1743 tree old_decl = old_version_node->decl;
1744 struct cgraph_node *new_version_node = NULL;
1745 tree new_decl;
1746
1747 if (!tree_versionable_function_p (old_decl))
1748 return NULL;
1749
1750 /* Make a new FUNCTION_DECL tree node for the
1751 new version. */
1752 new_decl = copy_node (old_decl);
1753
  /* Create the new version's call-graph node
     and update the edges of the new node.  */
1756 new_version_node =
1757 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1758 redirect_callers);
1759
1760 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1761 tree_function_versioning (old_decl, new_decl, tree_map, false);
1762 /* Update the call_expr on the edges to call the new version node. */
1763 update_call_expr (new_version_node);
1764
  /* Update the new version's properties.
     Make the new version visible only within this translation unit.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
1769 DECL_EXTERNAL (new_version_node->decl) = 0;
1770 DECL_ONE_ONLY (new_version_node->decl) = 0;
1771 TREE_PUBLIC (new_version_node->decl) = 0;
1772 DECL_COMDAT (new_version_node->decl) = 0;
1773 new_version_node->local.externally_visible = 0;
1774 new_version_node->local.local = 1;
1775 new_version_node->lowered = true;
1776 return new_version_node;
1777 }
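
/* For illustration, a caller that wants every existing call site to use
   the new copy might invoke this roughly as follows (the surrounding pass
   logic is hypothetical; the VEC operations and the call itself are the
   real interfaces from cgraph.h and this file):

       VEC (cgraph_edge_p, heap) *redirect_callers
         = VEC_alloc (cgraph_edge_p, heap, 4);
       struct cgraph_edge *e;
       struct cgraph_node *new_node;

       for (e = node->callers; e; e = e->next_caller)
         VEC_safe_push (cgraph_edge_p, heap, redirect_callers, e);
       new_node = cgraph_function_versioning (node, redirect_callers, NULL);
       VEC_free (cgraph_edge_p, heap, redirect_callers);

   Passing NULL for TREE_MAP requests no tree replacements; the result is
   NULL when the function is not versionable.  */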
1778
1779 /* Produce separate function body for inline clones so the offline copy can be
1780 modified without affecting them. */
1781 struct cgraph_node *
1782 save_inline_function_body (struct cgraph_node *node)
1783 {
1784 struct cgraph_node *first_clone;
1785
1786 gcc_assert (node == cgraph_node (node->decl));
1787
1788 cgraph_lower_function (node);
1789
  /* In non-unit-at-a-time mode we construct a full-fledged clone that we
     never output to the assembly file.  This clone is pointed to by the
     inline_decl of the original function, and the inlining infrastructure
     knows how to deal with it.  */
1793 if (!flag_unit_at_a_time)
1794 {
1795 struct cgraph_edge *e;
1796
1797 first_clone = cgraph_clone_node (node, node->count, 0, false);
1798 first_clone->needed = 0;
1799 first_clone->reachable = 1;
1800 /* Recursively clone all bodies. */
1801 for (e = first_clone->callees; e; e = e->next_callee)
1802 if (!e->inline_failed)
1803 cgraph_clone_inlined_nodes (e, true, false);
1804 }
1805 else
1806 first_clone = node->next_clone;
1807
1808 first_clone->decl = copy_node (node->decl);
1809 node->next_clone = NULL;
1810 if (!flag_unit_at_a_time)
1811 node->inline_decl = first_clone->decl;
1812 first_clone->prev_clone = NULL;
1813 cgraph_insert_node_to_hashtable (first_clone);
1814 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1815
  /* Copy the function body of NODE to the new offline clone.  */
1817 tree_function_versioning (node->decl, first_clone->decl, NULL, true);
1818
1819 DECL_EXTERNAL (first_clone->decl) = 0;
1820 DECL_ONE_ONLY (first_clone->decl) = 0;
1821 TREE_PUBLIC (first_clone->decl) = 0;
1822 DECL_COMDAT (first_clone->decl) = 0;
1823
1824 for (node = first_clone->next_clone; node; node = node->next_clone)
1825 node->decl = first_clone->decl;
1826 #ifdef ENABLE_CHECKING
1827 verify_cgraph_node (first_clone);
1828 #endif
1829 return first_clone;
1830 }
1831