1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements the main driver of the compilation process as well
23    as a few basic interprocedural optimizers.
24 
25    The main scope of this file is to act as an interface between the
26    tree-based frontends and the backend (and middle end).
27 
28    The front-end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32    This function is called once the front-end has parsed the whole body of a
33    function and it is certain that neither the body nor the declaration will change.
34
35    (There is one exception needed for implementing GCC extern inline
36    functions.)
37
38 - varpool_finalize_variable
39
40    This function has the same behavior as the above but is used for static
41    variables.
42
43 - cgraph_finalize_compilation_unit
44
45    This function is called once the (source level) compilation unit is
46    finalized and it will no longer change.
47 
48    The call-graph construction and local function
49    analysis take place here.  Bodies of unreachable functions are released
50    to conserve memory usage.
51 
52    The function can be called multiple times when multiple source level
53    compilation units are combined (such as in the C frontend).
54
55 - cgraph_optimize
56
57    In unit-at-a-time compilation the intraprocedural analysis takes
58    place here.  In particular the static functions whose address is never
59    taken are marked as local.  The backend can then use this information to
60    modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65    When a function or variable is referenced in some hidden way, the
66    call-graph data structure must be updated accordingly by this function.
67    There should be little need to call this function and all the references
68    should be made explicit to cgraph code.  At present these functions are
69    used by the C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73    This function is responsible for lowering tree nodes not understood by
74    generic code into understandable ones or alternatively marking
75    callgraph and varpool nodes referenced by the expression as needed.
76 
77    ??? On tree-ssa, genericizing should take place here and we would avoid
78    the need for these hooks (replacing them by a genericizing hook).
79
80    Analysis of all functions is deferred
81    to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82 
83    In cgraph_finalize_compilation_unit the reachable functions are
84    analyzed.  During analysis the call-graph edges from reachable
85    functions are constructed and their destinations are marked as
86    reachable.  References to functions and variables are discovered too
87    and variables found to be needed are output to the assembly file.  Via
88    the mark_referenced call in assemble_variable, functions referenced by
89    static variables are noticed too.
90 
91    The intra-procedural information is produced and its existence is
92    indicated by global_info_ready.  Once this flag is set it is impossible
93    to change a function from !reachable to reachable and thus
94    assemble_variable no longer calls mark_referenced.
95 
96    Finally the call-graph is topologically sorted and all reachable functions
97    that have not been completely inlined and are not external are output.
98
99    ??? It is possible that a reference to a function or variable is optimized
100   out.  We cannot deal with this nicely because the topological order is not
101   suitable for it.  For tree-ssa we may consider another pass doing
102   optimization and re-discovering reachable functions.
103 
104   ??? Reorganize the code so variables are output very last and only if they
105   really have been referenced by the produced code, so we catch more cases
106   where the reference has been optimized out.  */
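/* Illustrative sketch (not part of GCC): under the interface described
   above, a front-end would drive this file roughly as follows.
   cgraph_finalize_function and cgraph_finalize_compilation_unit are the
   real entry points; parse_next_function and more_functions_p are
   hypothetical front-end routines.  */
#if 0
static void
frontend_compile_unit (void)
{
  while (more_functions_p ())
    {
      /* Parse one function body completely...  */
      tree fndecl = parse_next_function ();

      /* ...then hand it over; neither the body nor the declaration may
	 change afterwards (except for extern inline functions).  */
      cgraph_finalize_function (fndecl, /*nested=*/false);
    }

  /* Once the whole translation unit is parsed, construct the callgraph,
     analyze the reachable functions and drive the pass manager (this
     ends up calling cgraph_optimize).  */
  cgraph_finalize_compilation_unit ();
}
#endif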
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "intl.h"
130 #include "function.h"
131 #include "ipa-prop.h"
132 #include "gimple.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "tree-dump.h"
136 #include "output.h"
137 #include "coverage.h"
138
139 static void cgraph_expand_all_functions (void);
140 static void cgraph_mark_functions_to_output (void);
141 static void cgraph_expand_function (struct cgraph_node *);
142 static void cgraph_output_pending_asms (void);
143 static void cgraph_analyze_function (struct cgraph_node *);
144
145 static FILE *cgraph_dump_file;
146
147 /* A vector of FUNCTION_DECLs declared as static constructors. */
148 static GTY (()) VEC(tree, gc) *static_ctors;
149 /* A vector of FUNCTION_DECLs declared as static destructors. */
150 static GTY (()) VEC(tree, gc) *static_dtors;
151
152 /* When the target does not have ctors and dtors, we call all constructors
153    and destructors from special initialization/destruction functions
154    recognized by collect2.
155 
156    When we are going to build these functions, collect all constructors and
157    destructors and turn them into normal functions.  */
158
159 static void
160 record_cdtor_fn (tree fndecl)
161 {
162 struct cgraph_node *node;
163 if (targetm.have_ctors_dtors
164 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
165 && !DECL_STATIC_DESTRUCTOR (fndecl)))
166 return;
167
168 if (DECL_STATIC_CONSTRUCTOR (fndecl))
169 {
170 VEC_safe_push (tree, gc, static_ctors, fndecl);
171 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
172 }
173 if (DECL_STATIC_DESTRUCTOR (fndecl))
174 {
175 VEC_safe_push (tree, gc, static_dtors, fndecl);
176 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
177 }
178 node = cgraph_node (fndecl);
179 node->local.disregard_inline_limits = 1;
180 cgraph_mark_reachable_node (node);
181 }
182
183 /* Define global constructor/destructor functions for the CDTORS, of
184    which there are LEN.  The CDTORS are sorted by initialization
185    priority.  If CTOR_P is true, these are constructors; otherwise,
186    they are destructors.  */
187
188 static void
189 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
190 {
191 size_t i;
192
193 i = 0;
194 while (i < len)
195 {
196 tree body;
197 tree fn;
198 priority_type priority;
199
200 priority = 0;
201 body = NULL_TREE;
202 /* Find the next batch of constructors/destructors with the same
203 initialization priority. */
204 do
205 {
206 priority_type p;
207 fn = cdtors[i];
208 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
209 if (!body)
210 priority = p;
211 else if (p != priority)
212 break;
213 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
214 fn, 0),
215 &body);
216 ++i;
217 }
218 while (i < len);
219 gcc_assert (body != NULL_TREE);
220       /* Generate a function to call all the functions of like
221 	 priority.  */
222 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
223 }
224 }
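/* Worked example (illustrative): if CDTORS holds three constructors
   already sorted by DECL_INIT_PRIORITY, say f [100], g [100] and
   h [65535], the loop above emits two collect2-visible functions: one
   'I' function of priority 100 whose body calls f () and g (), and one
   'I' function of priority 65535 whose body calls h ().  */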
225
226 /* Comparison function for qsort. P1 and P2 are actually of type
227 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
228 used to determine the sort order. */
229
230 static int
231 compare_ctor (const void *p1, const void *p2)
232 {
233 tree f1;
234 tree f2;
235 int priority1;
236 int priority2;
237
238 f1 = *(const tree *)p1;
239 f2 = *(const tree *)p2;
240 priority1 = DECL_INIT_PRIORITY (f1);
241 priority2 = DECL_INIT_PRIORITY (f2);
242
243 if (priority1 < priority2)
244 return -1;
245 else if (priority1 > priority2)
246 return 1;
247 else
248 /* Ensure a stable sort. */
249 return (const tree *)p1 - (const tree *)p2;
250 }
251
252 /* Comparison function for qsort. P1 and P2 are actually of type
253 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
254 used to determine the sort order. */
255
256 static int
257 compare_dtor (const void *p1, const void *p2)
258 {
259 tree f1;
260 tree f2;
261 int priority1;
262 int priority2;
263
264 f1 = *(const tree *)p1;
265 f2 = *(const tree *)p2;
266 priority1 = DECL_FINI_PRIORITY (f1);
267 priority2 = DECL_FINI_PRIORITY (f2);
268
269 if (priority1 < priority2)
270 return -1;
271 else if (priority1 > priority2)
272 return 1;
273 else
274 /* Ensure a stable sort. */
275 return (const tree *)p1 - (const tree *)p2;
276 }
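/* A minimal self-contained analogue of the comparators above (an
   illustration, not part of GCC): the same pattern of sorting by
   priority and breaking ties on element position so that qsort, which
   is not stable by itself, produces a deterministic order.  */
#if 0
#include <stdlib.h>

struct prioritized { int priority; };

static int
compare_prioritized (const void *p1, const void *p2)
{
  const struct prioritized *f1 = (const struct prioritized *) p1;
  const struct prioritized *f2 = (const struct prioritized *) p2;

  if (f1->priority < f2->priority)
    return -1;
  else if (f1->priority > f2->priority)
    return 1;
  else
    /* Tie-break on position, as compare_ctor/compare_dtor do.  */
    return (int) (f1 - f2);
}

int
main (void)
{
  struct prioritized arr[3] = { { 65535 }, { 100 }, { 100 } };
  qsort (arr, 3, sizeof arr[0], compare_prioritized);
  /* arr is now sorted into priority order: 100, 100, 65535.  */
  return 0;
}
#endif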
277
278 /* Generate functions to call static constructors and destructors
279 for targets that do not support .ctors/.dtors sections. These
280 functions have magic names which are detected by collect2. */
281
282 static void
283 cgraph_build_cdtor_fns (void)
284 {
285 if (!VEC_empty (tree, static_ctors))
286 {
287 gcc_assert (!targetm.have_ctors_dtors);
288 qsort (VEC_address (tree, static_ctors),
289 VEC_length (tree, static_ctors),
290 sizeof (tree),
291 compare_ctor);
292 build_cdtor (/*ctor_p=*/true,
293 VEC_address (tree, static_ctors),
294 VEC_length (tree, static_ctors));
295 VEC_truncate (tree, static_ctors, 0);
296 }
297
298 if (!VEC_empty (tree, static_dtors))
299 {
300 gcc_assert (!targetm.have_ctors_dtors);
301 qsort (VEC_address (tree, static_dtors),
302 VEC_length (tree, static_dtors),
303 sizeof (tree),
304 compare_dtor);
305 build_cdtor (/*ctor_p=*/false,
306 VEC_address (tree, static_dtors),
307 VEC_length (tree, static_dtors));
308 VEC_truncate (tree, static_dtors, 0);
309 }
310 }
311
312 /* Determine if function DECL is needed.  That is, visible to something
313    either outside this translation unit or to something magic in the
314    system configury.  */
315
316 bool
317 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
318 {
319 /* If the user told us it is used, then it must be so. */
320 if (node->local.externally_visible)
321 return true;
322
323 /* ??? If the assembler name is set by hand, it is possible to assemble
324 the name later after finalizing the function and the fact is noticed
325 in assemble_name then. This is arguably a bug. */
326 if (DECL_ASSEMBLER_NAME_SET_P (decl)
327 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
328 return true;
329
330 /* With -fkeep-inline-functions we are keeping all inline functions except
331 for extern inline ones. */
332 if (flag_keep_inline_functions
333 && DECL_DECLARED_INLINE_P (decl)
334 && !DECL_EXTERNAL (decl)
335 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
336 return true;
337
338 /* If we decided it was needed before, but at the time we didn't have
339 the body of the function available, then it's still needed. We have
340 to go back and re-check its dependencies now. */
341 if (node->needed)
342 return true;
343
344 /* Externally visible functions must be output. The exception is
345 COMDAT functions that must be output only when they are needed.
346
347      When not optimizing, also output the static functions (see
348      PR24561), but don't do so for always_inline functions, functions
349      declared inline and nested functions.  These were optimized out
350      in the original implementation and it is unclear whether we want
351      to change the behavior here.  */
352 if (((TREE_PUBLIC (decl)
353 || (!optimize && !node->local.disregard_inline_limits
354 && !DECL_DECLARED_INLINE_P (decl)
355 && !node->origin))
356 && !flag_whole_program
357 && !flag_lto
358 && !flag_whopr)
359 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
360 return true;
361
362 /* Constructors and destructors are reachable from the runtime by
363 some mechanism. */
364 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
365 return true;
366
367 return false;
368 }
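/* For illustration, given this translation unit:

     void pub (void) {}                  // needed: TREE_PUBLIC
     static void priv (void) {}          // needed only when !optimize
     __attribute__((constructor))
     static void boot (void) {}          // needed: static constructor

   pub and boot are always considered needed by the logic above, while
   priv becomes needed only via the !optimize rule (or once something
   reachable refers to it).  */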
369
370 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
371    these functions into the callgraph so that they look like ordinary
372    reachable functions inserted into the callgraph at construction time.  */
373
374 bool
375 cgraph_process_new_functions (void)
376 {
377 bool output = false;
378 tree fndecl;
379 struct cgraph_node *node;
380
381   /* Note that this queue may grow as it is being processed, as the new
382      functions may generate new ones.  */
383 while (cgraph_new_nodes)
384 {
385 node = cgraph_new_nodes;
386 fndecl = node->decl;
387 cgraph_new_nodes = cgraph_new_nodes->next_needed;
388 switch (cgraph_state)
389 {
390 case CGRAPH_STATE_CONSTRUCTION:
391 /* At construction time we just need to finalize function and move
392 it into reachable functions list. */
393
394 node->next_needed = NULL;
395 cgraph_finalize_function (fndecl, false);
396 cgraph_mark_reachable_node (node);
397 output = true;
398 break;
399
400 case CGRAPH_STATE_IPA:
401 case CGRAPH_STATE_IPA_SSA:
402 	  /* When IPA optimization has already started, do all essential
403 	     transformations that have already been performed on the whole
404 	     cgraph but not on this function.  */
405
406 gimple_register_cfg_hooks ();
407 if (!node->analyzed)
408 cgraph_analyze_function (node);
409 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
410 current_function_decl = fndecl;
411 compute_inline_parameters (node);
412 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
413 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
414 /* When not optimizing, be sure we run early local passes anyway
415 to expand OMP. */
416 || !optimize)
417 execute_pass_list (pass_early_local_passes.pass.sub);
418 free_dominance_info (CDI_POST_DOMINATORS);
419 free_dominance_info (CDI_DOMINATORS);
420 pop_cfun ();
421 current_function_decl = NULL;
422 break;
423
424 case CGRAPH_STATE_EXPANSION:
425 /* Functions created during expansion shall be compiled
426 directly. */
427 node->process = 0;
428 cgraph_expand_function (node);
429 break;
430
431 default:
432 gcc_unreachable ();
433 break;
434 }
435 cgraph_call_function_insertion_hooks (node);
436 }
437 return output;
438 }
439
440 /* As a GCC extension we allow redefinition of a function.  The
441    semantics when the two bodies differ is not well defined.
442    We replace the old body with the new body, so in unit-at-a-time mode
443    we always use the new body, while in normal mode we may end up with
444    the old body inlined into some functions and the new body expanded and
445    inlined in others.
446 
447    ??? It may make more sense to use one body for inlining and the other
448    body for expanding the function, but this is difficult to do.  */
449
450 static void
451 cgraph_reset_node (struct cgraph_node *node)
452 {
453 /* If node->process is set, then we have already begun whole-unit analysis.
454 This is *not* testing for whether we've already emitted the function.
455 That case can be sort-of legitimately seen with real function redefinition
456 errors. I would argue that the front end should never present us with
457 such a case, but don't enforce that for now. */
458 gcc_assert (!node->process);
459
460 /* Reset our data structures so we can analyze the function again. */
461 memset (&node->local, 0, sizeof (node->local));
462 memset (&node->global, 0, sizeof (node->global));
463 memset (&node->rtl, 0, sizeof (node->rtl));
464 node->analyzed = false;
465 node->local.redefined_extern_inline = true;
466 node->local.finalized = false;
467
468 cgraph_node_remove_callees (node);
469
470   /* We may need to re-queue the node for assembling in case
471      we already processed it and ignored it as not needed, or got
472      a re-declaration in IMA mode.  */
473 if (node->reachable)
474 {
475 struct cgraph_node *n;
476
477 for (n = cgraph_nodes_queue; n; n = n->next_needed)
478 if (n == node)
479 break;
480 if (!n)
481 node->reachable = 0;
482 }
483 }
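/* For illustration, the GNU89 extern inline extension described above:

     extern inline int half (int x) { return x / 2; }   // first body
     int half (int x) { return x >> 1; }                // redefinition

   When the second definition is finalized, cgraph_reset_node discards
   the analysis of the first body so the function can be analyzed again
   with the new one.  */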
484
485 static void
486 cgraph_lower_function (struct cgraph_node *node)
487 {
488 if (node->lowered)
489 return;
490
491 if (node->nested)
492 lower_nested_functions (node->decl);
493 gcc_assert (!node->nested);
494
495 tree_lowering_passes (node->decl);
496 node->lowered = true;
497 }
498
499 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
500 logic in effect. If NESTED is true, then our caller cannot stand to have
501 the garbage collector run at the moment. We would need to either create
502 a new GC context, or just not compile right now. */
503
504 void
505 cgraph_finalize_function (tree decl, bool nested)
506 {
507 struct cgraph_node *node = cgraph_node (decl);
508
509 if (node->local.finalized)
510 cgraph_reset_node (node);
511
512 node->pid = cgraph_max_pid ++;
513 notice_global_symbol (decl);
514 node->local.finalized = true;
515 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
516 node->finalized_by_frontend = true;
517 record_cdtor_fn (node->decl);
518
519 if (cgraph_decide_is_function_needed (node, decl))
520 cgraph_mark_needed_node (node);
521
522 /* Since we reclaim unreachable nodes at the end of every language
523 level unit, we need to be conservative about possible entry points
524 there. */
525 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
526 cgraph_mark_reachable_node (node);
527
528 /* If we've not yet emitted decl, tell the debug info about it. */
529 if (!TREE_ASM_WRITTEN (decl))
530 (*debug_hooks->deferred_inline_function) (decl);
531
532 /* Possibly warn about unused parameters. */
533 if (warn_unused_parameter)
534 do_warn_unused_parameter (decl);
535
536 if (!nested)
537 ggc_collect ();
538 }
539
540 /* The C99 extern inline keywords allow changing the declaration after the
541    function has been finalized.  We need to re-decide whether to mark the
542    function as needed then.  */
543
544 void
545 cgraph_mark_if_needed (tree decl)
546 {
547 struct cgraph_node *node = cgraph_node (decl);
548 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
549 cgraph_mark_needed_node (node);
550 }
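/* For illustration of the C99 case handled above:

     inline int sq (int x) { return x * x; }   // inline definition only
     extern int sq (int x);                    // later declaration

   The later extern declaration turns the inline definition into an
   external definition, so cgraph_mark_if_needed must re-evaluate
   whether sq has to be output.  */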
551
552 /* Return TRUE if NODE2 is NODE or a (possibly transitive) clone of NODE.  */
553 static bool
554 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
555 {
556 while (node != node2 && node2)
557 node2 = node2->clone_of;
558 return node2 != NULL;
559 }
560
561 /* Verify the consistency of the given cgraph node.  */
562 void
563 verify_cgraph_node (struct cgraph_node *node)
564 {
565 struct cgraph_edge *e;
566 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
567 struct function *saved_cfun = cfun;
568 basic_block this_block;
569 gimple_stmt_iterator gsi;
570 bool error_found = false;
571
572 if (errorcount || sorrycount)
573 return;
574
575 timevar_push (TV_CGRAPH_VERIFY);
576   /* debug_generic_stmt needs a correct cfun.  */
577 set_cfun (this_cfun);
578 for (e = node->callees; e; e = e->next_callee)
579 if (e->aux)
580 {
581 error ("aux field set for edge %s->%s",
582 identifier_to_locale (cgraph_node_name (e->caller)),
583 identifier_to_locale (cgraph_node_name (e->callee)));
584 error_found = true;
585 }
586 if (node->count < 0)
587 {
588 error ("Execution count is negative");
589 error_found = true;
590 }
591 if (node->global.inlined_to && node->local.externally_visible)
592 {
593 error ("Externally visible inline clone");
594 error_found = true;
595 }
596 if (node->global.inlined_to && node->address_taken)
597 {
598 error ("Inline clone with address taken");
599 error_found = true;
600 }
601 if (node->global.inlined_to && node->needed)
602 {
603 error ("Inline clone is needed");
604 error_found = true;
605 }
606 for (e = node->callers; e; e = e->next_caller)
607 {
608 if (e->count < 0)
609 {
610 error ("caller edge count is negative");
611 error_found = true;
612 }
613 if (e->frequency < 0)
614 {
615 error ("caller edge frequency is negative");
616 error_found = true;
617 }
618 if (e->frequency > CGRAPH_FREQ_MAX)
619 {
620 error ("caller edge frequency is too large");
621 error_found = true;
622 }
623 if (!e->inline_failed)
624 {
625 if (node->global.inlined_to
626 != (e->caller->global.inlined_to
627 ? e->caller->global.inlined_to : e->caller))
628 {
629 error ("inlined_to pointer is wrong");
630 error_found = true;
631 }
632 if (node->callers->next_caller)
633 {
634 error ("multiple inline callers");
635 error_found = true;
636 }
637 }
638 else
639 if (node->global.inlined_to)
640 {
641 error ("inlined_to pointer set for noninline callers");
642 error_found = true;
643 }
644 }
645 if (!node->callers && node->global.inlined_to)
646 {
647 error ("inlined_to pointer is set but no predecessors found");
648 error_found = true;
649 }
650 if (node->global.inlined_to == node)
651 {
652 error ("inlined_to pointer refers to itself");
653 error_found = true;
654 }
655
656 if (!cgraph_node (node->decl))
657 {
658 error ("node not found in cgraph_hash");
659 error_found = true;
660 }
661
662 if (node->clone_of)
663 {
664 struct cgraph_node *n;
665 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
666 if (n == node)
667 break;
668 if (!n)
669 {
670 error ("node has wrong clone_of");
671 error_found = true;
672 }
673 }
674 if (node->clones)
675 {
676 struct cgraph_node *n;
677 for (n = node->clones; n; n = n->next_sibling_clone)
678 if (n->clone_of != node)
679 break;
680 if (n)
681 {
682 error ("node has wrong clone list");
683 error_found = true;
684 }
685 }
686 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
687 {
688 error ("node is in clone list but it is not clone");
689 error_found = true;
690 }
691 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
692 {
693 error ("node has wrong prev_clone pointer");
694 error_found = true;
695 }
696 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
697 {
698 error ("double linked list of clones corrupted");
699 error_found = true;
700 }
701
702 if (node->analyzed && gimple_has_body_p (node->decl)
703 && !TREE_ASM_WRITTEN (node->decl)
704 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
705 && !flag_wpa)
706 {
707 if (this_cfun->cfg)
708 {
709 /* The nodes we're interested in are never shared, so walk
710 the tree ignoring duplicates. */
711 struct pointer_set_t *visited_nodes = pointer_set_create ();
712 /* Reach the trees by walking over the CFG, and note the
713 enclosing basic-blocks in the call edges. */
714 FOR_EACH_BB_FN (this_block, this_cfun)
715 for (gsi = gsi_start_bb (this_block);
716 !gsi_end_p (gsi);
717 gsi_next (&gsi))
718 {
719 gimple stmt = gsi_stmt (gsi);
720 tree decl;
721 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
722 {
723 struct cgraph_edge *e = cgraph_edge (node, stmt);
724 if (e)
725 {
726 if (e->aux)
727 {
728 error ("shared call_stmt:");
729 debug_gimple_stmt (stmt);
730 error_found = true;
731 }
732 if (!clone_of_p (cgraph_node (decl), e->callee)
733 && !e->callee->global.inlined_to)
734 {
735 error ("edge points to wrong declaration:");
736 debug_tree (e->callee->decl);
737 fprintf (stderr," Instead of:");
738 debug_tree (decl);
739 }
740 e->aux = (void *)1;
741 }
742 else
743 {
744 error ("missing callgraph edge for call stmt:");
745 debug_gimple_stmt (stmt);
746 error_found = true;
747 }
748 }
749 }
750 pointer_set_destroy (visited_nodes);
751 }
752 else
753 /* No CFG available?! */
754 gcc_unreachable ();
755
756 for (e = node->callees; e; e = e->next_callee)
757 {
758 if (!e->aux && !e->indirect_call)
759 {
760 error ("edge %s->%s has no corresponding call_stmt",
761 identifier_to_locale (cgraph_node_name (e->caller)),
762 identifier_to_locale (cgraph_node_name (e->callee)));
763 debug_gimple_stmt (e->call_stmt);
764 error_found = true;
765 }
766 e->aux = 0;
767 }
768 }
769 if (error_found)
770 {
771 dump_cgraph_node (stderr, node);
772 internal_error ("verify_cgraph_node failed");
773 }
774 set_cfun (saved_cfun);
775 timevar_pop (TV_CGRAPH_VERIFY);
776 }
777
778 /* Verify whole cgraph structure. */
779 void
780 verify_cgraph (void)
781 {
782 struct cgraph_node *node;
783
784 if (sorrycount || errorcount)
785 return;
786
787 for (node = cgraph_nodes; node; node = node->next)
788 verify_cgraph_node (node);
789 }
790
791 /* Output all asm statements we have stored up to be output. */
792
793 static void
794 cgraph_output_pending_asms (void)
795 {
796 struct cgraph_asm_node *can;
797
798 if (errorcount || sorrycount)
799 return;
800
801 for (can = cgraph_asm_nodes; can; can = can->next)
802 assemble_asm (can->asm_str);
803 cgraph_asm_nodes = NULL;
804 }
805
806 /* Analyze the function scheduled to be output. */
807 static void
808 cgraph_analyze_function (struct cgraph_node *node)
809 {
810 tree save = current_function_decl;
811 tree decl = node->decl;
812
813 current_function_decl = decl;
814 push_cfun (DECL_STRUCT_FUNCTION (decl));
815
816   /* Make sure to gimplify bodies only once.  While analyzing a
817      function we lower it, which will require gimplified nested
818 functions, so we can end up here with an already gimplified
819 body. */
820 if (!gimple_body (decl))
821 gimplify_function_tree (decl);
822 dump_function (TDI_generic, decl);
823
824 cgraph_lower_function (node);
825 node->analyzed = true;
826
827 pop_cfun ();
828 current_function_decl = save;
829 }
830
831 /* Look for externally_visible and used attributes and mark cgraph nodes
832 accordingly.
833
834 We cannot mark the nodes at the point the attributes are processed (in
835 handle_*_attribute) because the copy of the declarations available at that
836 point may not be canonical. For example, in:
837
838 void f();
839 void f() __attribute__((used));
840
841 the declaration we see in handle_used_attribute will be the second
842 declaration -- but the front end will subsequently merge that declaration
843 with the original declaration and discard the second declaration.
844
845 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
846
847 void f() {}
848 void f() __attribute__((externally_visible));
849
850 is valid.
851
852 So, we walk the nodes at the end of the translation unit, applying the
853 attributes at that point. */
854
855 static void
856 process_function_and_variable_attributes (struct cgraph_node *first,
857 struct varpool_node *first_var)
858 {
859 struct cgraph_node *node;
860 struct varpool_node *vnode;
861
862 for (node = cgraph_nodes; node != first; node = node->next)
863 {
864 tree decl = node->decl;
865 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
866 {
867 mark_decl_referenced (decl);
868 if (node->local.finalized)
869 cgraph_mark_needed_node (node);
870 }
871 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
872 {
873 if (! TREE_PUBLIC (node->decl))
874 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
875 "%<externally_visible%>"
876 		    " attribute has effect only on public objects");
877 else if (node->local.finalized)
878 cgraph_mark_needed_node (node);
879 }
880 }
881 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
882 {
883 tree decl = vnode->decl;
884 if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
885 {
886 mark_decl_referenced (decl);
887 if (vnode->finalized)
888 varpool_mark_needed_node (vnode);
889 }
890 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
891 {
892 if (! TREE_PUBLIC (vnode->decl))
893 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
894 "%<externally_visible%>"
895 		    " attribute has effect only on public objects");
896 else if (vnode->finalized)
897 varpool_mark_needed_node (vnode);
898 }
899 }
900 }
901
902 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
903    transitively each reachable function) and build the cgraph.
904    The function can be called multiple times after inserting new nodes into
905    the beginning of the queue.  Just the new part of the queue is re-scanned then.  */
906
907 static void
908 cgraph_analyze_functions (void)
909 {
910 /* Keep track of already processed nodes when called multiple times for
911 intermodule optimization. */
912 static struct cgraph_node *first_analyzed;
913 struct cgraph_node *first_processed = first_analyzed;
914 static struct varpool_node *first_analyzed_var;
915 struct cgraph_node *node, *next;
916
917 process_function_and_variable_attributes (first_processed,
918 first_analyzed_var);
919 first_processed = cgraph_nodes;
920 first_analyzed_var = varpool_nodes;
921 varpool_analyze_pending_decls ();
922 if (cgraph_dump_file)
923 {
924 fprintf (cgraph_dump_file, "Initial entry points:");
925 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
926 if (node->needed)
927 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
928 fprintf (cgraph_dump_file, "\n");
929 }
930 cgraph_process_new_functions ();
931
932 /* Propagate reachability flag and lower representation of all reachable
933 functions. In the future, lowering will introduce new functions and
934 new entry points on the way (by template instantiation and virtual
935 method table generation for instance). */
936 while (cgraph_nodes_queue)
937 {
938 struct cgraph_edge *edge;
939 tree decl = cgraph_nodes_queue->decl;
940
941 node = cgraph_nodes_queue;
942 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
943 node->next_needed = NULL;
944
945       /* ??? It is possible to create an extern inline function and later
946 	 use the weak alias attribute to kill its body.  See
947 	 gcc.c-torture/compile/20011119-1.c  */
948 if (!DECL_STRUCT_FUNCTION (decl))
949 {
950 cgraph_reset_node (node);
951 continue;
952 }
953
954 if (!node->analyzed)
955 cgraph_analyze_function (node);
956
957 for (edge = node->callees; edge; edge = edge->next_callee)
958 if (!edge->callee->reachable)
959 cgraph_mark_reachable_node (edge->callee);
960
961       /* If decl is a clone of an abstract function, mark that abstract
962 	 function so that we don't release its body.  The DECL_INITIAL () of
963 	 that abstract function declaration will later be needed to output debug info.  */
964 if (DECL_ABSTRACT_ORIGIN (decl))
965 {
966 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
967 origin_node->abstract_and_needed = true;
968 }
969
970       /* We finalize local static variables while constructing callgraph
971 	 edges.  Process their attributes too.  */
972 process_function_and_variable_attributes (first_processed,
973 first_analyzed_var);
974 first_processed = cgraph_nodes;
975 first_analyzed_var = varpool_nodes;
976 varpool_analyze_pending_decls ();
977 cgraph_process_new_functions ();
978 }
979
980 /* Collect entry points to the unit. */
981 if (cgraph_dump_file)
982 {
983 fprintf (cgraph_dump_file, "Unit entry points:");
984 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
985 if (node->needed)
986 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
987 fprintf (cgraph_dump_file, "\n\nInitial ");
988 dump_cgraph (cgraph_dump_file);
989 }
990
991 if (cgraph_dump_file)
992 fprintf (cgraph_dump_file, "\nReclaiming functions:");
993
994 for (node = cgraph_nodes; node != first_analyzed; node = next)
995 {
996 tree decl = node->decl;
997 next = node->next;
998
999 if (node->local.finalized && !gimple_has_body_p (decl))
1000 cgraph_reset_node (node);
1001
1002 if (!node->reachable && gimple_has_body_p (decl))
1003 {
1004 if (cgraph_dump_file)
1005 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1006 cgraph_remove_node (node);
1007 continue;
1008 }
1009 else
1010 node->next_needed = NULL;
1011 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1012 gcc_assert (node->analyzed == node->local.finalized);
1013 }
1014 if (cgraph_dump_file)
1015 {
1016 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1017 dump_cgraph (cgraph_dump_file);
1018 }
1019 first_analyzed = cgraph_nodes;
1020 ggc_collect ();
1021 }
1022
1023
1024 /* Emit thunks for every node in the cgraph.
1025 FIXME: We really ought to emit thunks only for functions that are needed. */
1026
1027 static void
1028 cgraph_emit_thunks (void)
1029 {
1030 struct cgraph_node *n;
1031
1032 for (n = cgraph_nodes; n; n = n->next)
1033 {
1034 /* Only emit thunks on functions defined in this TU.
1035 Note that this may emit more thunks than strictly necessary.
1036 During optimization some nodes may disappear. It would be
1037 	 nice to emit thunks only for the functions that will be
1038 emitted, but we cannot know that until the inliner and other
1039 IPA passes have run (see the sequencing of the call to
1040 cgraph_mark_functions_to_output in cgraph_optimize). */
1041 if (n->reachable
1042 && !DECL_EXTERNAL (n->decl))
1043 lang_hooks.callgraph.emit_associated_thunks (n->decl);
1044 }
1045 }
1046
1047
1048 /* Analyze the whole compilation unit once it is parsed completely. */
1049
1050 void
1051 cgraph_finalize_compilation_unit (void)
1052 {
1053 timevar_push (TV_CGRAPH);
1054
1055   /* Do not skip analyzing the functions if there were errors; we would
1056      miss diagnostics for the following functions otherwise.  */
1057
1058 /* Emit size functions we didn't inline. */
1059 finalize_size_functions ();
1060
1061 /* Call functions declared with the "constructor" or "destructor"
1062 attribute. */
1063 cgraph_build_cdtor_fns ();
1064
1065 /* Mark alias targets necessary and emit diagnostics. */
1066 finish_aliases_1 ();
1067
1068 if (!quiet_flag)
1069 {
1070 fprintf (stderr, "\nAnalyzing compilation unit\n");
1071 fflush (stderr);
1072 }
1073
1074 /* Gimplify and lower all functions, compute reachability and
1075 remove unreachable nodes. */
1076 cgraph_analyze_functions ();
1077
1078 /* Emit thunks for reachable nodes, if needed. */
1079 if (lang_hooks.callgraph.emit_associated_thunks)
1080 cgraph_emit_thunks ();
1081
1082 /* Mark alias targets necessary and emit diagnostics. */
1083 finish_aliases_1 ();
1084
1085 /* Gimplify and lower thunks. */
1086 cgraph_analyze_functions ();
1087
1088 /* Finally drive the pass manager. */
1089 cgraph_optimize ();
1090
1091 timevar_pop (TV_CGRAPH);
1092 }
1093
1094
1095 /* Figure out what functions we want to assemble. */
1096
1097 static void
1098 cgraph_mark_functions_to_output (void)
1099 {
1100 struct cgraph_node *node;
1101
1102 for (node = cgraph_nodes; node; node = node->next)
1103 {
1104 tree decl = node->decl;
1105 struct cgraph_edge *e;
1106
1107 gcc_assert (!node->process);
1108
1109 for (e = node->callers; e; e = e->next_caller)
1110 if (e->inline_failed)
1111 break;
1112
1113 /* We need to output all local functions that are used and not
1114 always inlined, as well as those that are reachable from
1115 outside the current compilation unit. */
1116 if (node->analyzed
1117 && !node->global.inlined_to
1118 && (node->needed
1119 || (e && node->reachable))
1120 && !TREE_ASM_WRITTEN (decl)
1121 && !DECL_EXTERNAL (decl))
1122 node->process = 1;
1123 else
1124 {
1125 /* We should've reclaimed all functions that are not needed. */
1126 #ifdef ENABLE_CHECKING
1127 if (!node->global.inlined_to
1128 && gimple_has_body_p (decl)
1129 && !DECL_EXTERNAL (decl))
1130 {
1131 dump_cgraph_node (stderr, node);
1132 internal_error ("failed to reclaim unneeded function");
1133 }
1134 #endif
1135 gcc_assert (node->global.inlined_to
1136 || !gimple_has_body_p (decl)
1137 || DECL_EXTERNAL (decl));
1138
1139 }
1140
1141 }
1142 }
1143
1144 /* Expand function specified by NODE. */
1145
1146 static void
1147 cgraph_expand_function (struct cgraph_node *node)
1148 {
1149 tree decl = node->decl;
1150
1151   /* We ought not to compile any inline clones.  */
1152 gcc_assert (!node->global.inlined_to);
1153
1154 announce_function (decl);
1155 node->process = 0;
1156
1157 gcc_assert (node->lowered);
1158
1159 /* Generate RTL for the body of DECL. */
1160 tree_rest_of_compilation (decl);
1161
1162 /* Make sure that BE didn't give up on compiling. */
1163 gcc_assert (TREE_ASM_WRITTEN (decl));
1164 current_function_decl = NULL;
1165 gcc_assert (!cgraph_preserve_function_body_p (decl));
1166 cgraph_release_function_body (node);
1167 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1168 points to the dead function body. */
1169 cgraph_node_remove_callees (node);
1170
1171 cgraph_function_flags_ready = true;
1172 }
1173
1174 /* Return true when the call on edge E has been inlined; otherwise the
1175    reason it was not is stored into *REASON.  */
1175
1176 bool
1177 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1178 {
1179 *reason = e->inline_failed;
1180 return !e->inline_failed;
1181 }
1182
1183
1184
1185 /* Expand all functions that must be output.
1186
1187    Attempt to topologically sort the nodes so a function is output when
1188    all the functions it calls are already assembled, to allow data to be
1189 propagated across the callgraph. Use a stack to get smaller distance
1190 between a function and its callees (later we may choose to use a more
1191 sophisticated algorithm for function reordering; we will likely want
1192 to use subsections to make the output functions appear in top-down
1193 order). */
1194
1195 static void
1196 cgraph_expand_all_functions (void)
1197 {
1198 struct cgraph_node *node;
1199 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1200 int order_pos, new_order_pos = 0;
1201 int i;
1202
1203 order_pos = cgraph_postorder (order);
1204 gcc_assert (order_pos == cgraph_n_nodes);
1205
1206 /* Garbage collector may remove inline clones we eliminate during
1207 optimization. So we must be sure to not reference them. */
1208 for (i = 0; i < order_pos; i++)
1209 if (order[i]->process)
1210 order[new_order_pos++] = order[i];
1211
1212 for (i = new_order_pos - 1; i >= 0; i--)
1213 {
1214 node = order[i];
1215 if (node->process)
1216 {
1217 gcc_assert (node->reachable);
1218 node->process = 0;
1219 cgraph_expand_function (node);
1220 }
1221 }
1222 cgraph_process_new_functions ();
1223
1224 free (order);
1225
1226 }
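/* For illustration of the intent described above: with a callgraph
   main -> helper -> leaf, the expansion order aims to assemble leaf
   first, then helper, then main, so that information recorded while
   assembling a callee is available when its callers are output.  */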
1227
1228 /* This is used to sort the node types by the cgraph order number. */
1229
1230 enum cgraph_order_sort_kind
1231 {
1232 ORDER_UNDEFINED = 0,
1233 ORDER_FUNCTION,
1234 ORDER_VAR,
1235 ORDER_ASM
1236 };
1237
1238 struct cgraph_order_sort
1239 {
1240 enum cgraph_order_sort_kind kind;
1241 union
1242 {
1243 struct cgraph_node *f;
1244 struct varpool_node *v;
1245 struct cgraph_asm_node *a;
1246 } u;
1247 };
1248
1249 /* Output all functions, variables, and asm statements in the order
1250 according to their order fields, which is the order in which they
1251 appeared in the file. This implements -fno-toplevel-reorder. In
1252 this mode we may output functions and variables which don't really
1253 need to be output. */
1254
1255 static void
1256 cgraph_output_in_order (void)
1257 {
1258 int max;
1259 size_t size;
1260 struct cgraph_order_sort *nodes;
1261 int i;
1262 struct cgraph_node *pf;
1263 struct varpool_node *pv;
1264 struct cgraph_asm_node *pa;
1265
1266 max = cgraph_order;
1267 size = max * sizeof (struct cgraph_order_sort);
1268 nodes = (struct cgraph_order_sort *) alloca (size);
1269 memset (nodes, 0, size);
1270
1271 varpool_analyze_pending_decls ();
1272
1273 for (pf = cgraph_nodes; pf; pf = pf->next)
1274 {
1275 if (pf->process)
1276 {
1277 i = pf->order;
1278 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1279 nodes[i].kind = ORDER_FUNCTION;
1280 nodes[i].u.f = pf;
1281 }
1282 }
1283
1284 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1285 {
1286 i = pv->order;
1287 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1288 nodes[i].kind = ORDER_VAR;
1289 nodes[i].u.v = pv;
1290 }
1291
1292 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1293 {
1294 i = pa->order;
1295 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1296 nodes[i].kind = ORDER_ASM;
1297 nodes[i].u.a = pa;
1298 }
1299
1300   /* In no-toplevel-reorder mode we output all statics; mark them as needed.  */
1301 for (i = 0; i < max; ++i)
1302 {
1303 if (nodes[i].kind == ORDER_VAR)
1304 {
1305 varpool_mark_needed_node (nodes[i].u.v);
1306 }
1307 }
1308 varpool_empty_needed_queue ();
1309
1310 for (i = 0; i < max; ++i)
1311 {
1312 switch (nodes[i].kind)
1313 {
1314 case ORDER_FUNCTION:
1315 nodes[i].u.f->process = 0;
1316 cgraph_expand_function (nodes[i].u.f);
1317 break;
1318
1319 case ORDER_VAR:
1320 varpool_assemble_decl (nodes[i].u.v);
1321 break;
1322
1323 case ORDER_ASM:
1324 assemble_asm (nodes[i].u.a->asm_str);
1325 break;
1326
1327 case ORDER_UNDEFINED:
1328 break;
1329
1330 default:
1331 gcc_unreachable ();
1332 }
1333 }
1334
1335 cgraph_asm_nodes = NULL;
1336 }
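/* For illustration of -fno-toplevel-reorder: given

     int counter = 0;
     asm ("# prologue marker");
     void bump (void) { counter++; }

   the routine above emits the variable, the toplevel asm and the
   function in exactly this source order, even though some of them might
   not strictly need to be output.  */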
1337
1338 /* Return true when the function body of DECL still needs to be kept around
1339    for later re-use.  */
1340 bool
1341 cgraph_preserve_function_body_p (tree decl)
1342 {
1343 struct cgraph_node *node;
1344
1345 gcc_assert (cgraph_global_info_ready);
1346 /* Look if there is any clone around. */
1347 node = cgraph_node (decl);
1348 if (node->clones)
1349 return true;
1350 return false;
1351 }
1352
1353 static void
1354 ipa_passes (void)
1355 {
1356 set_cfun (NULL);
1357 current_function_decl = NULL;
1358 gimple_register_cfg_hooks ();
1359 bitmap_obstack_initialize (NULL);
1360
1361 if (!in_lto_p)
1362 execute_ipa_pass_list (all_small_ipa_passes);
1363
1364 /* If pass_all_early_optimizations was not scheduled, the state of
1365 the cgraph will not be properly updated. Update it now. */
1366 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1367 cgraph_state = CGRAPH_STATE_IPA_SSA;
1368
1369 if (!in_lto_p)
1370 {
1371 /* Generate coverage variables and constructors. */
1372 coverage_finish ();
1373
1374 /* Process new functions added. */
1375 set_cfun (NULL);
1376 current_function_decl = NULL;
1377 cgraph_process_new_functions ();
1378
1379 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1380 }
1381 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1382
1383 if (!in_lto_p)
1384 ipa_write_summaries ();
1385
1386 if (!flag_ltrans)
1387 execute_ipa_pass_list (all_regular_ipa_passes);
1388
1389 bitmap_obstack_release (NULL);
1390 }
1391
1392
1393 /* Perform simple optimizations based on callgraph. */
1394
1395 void
1396 cgraph_optimize (void)
1397 {
1398 if (errorcount || sorrycount)
1399 return;
1400
1401 #ifdef ENABLE_CHECKING
1402 verify_cgraph ();
1403 #endif
1404
1405   /* The frontend may output common variables after the unit has been finalized.
1406      It is safe to deal with them here as they are always zero-initialized.  */
1407 varpool_analyze_pending_decls ();
1408
1409 timevar_push (TV_CGRAPHOPT);
1410 if (pre_ipa_mem_report)
1411 {
1412 fprintf (stderr, "Memory consumption before IPA\n");
1413 dump_memory_report (false);
1414 }
1415 if (!quiet_flag)
1416 fprintf (stderr, "Performing interprocedural optimizations\n");
1417 cgraph_state = CGRAPH_STATE_IPA;
1418
1419   /* Don't run the IPA passes if there were any errors or sorry messages.  */
1420 if (errorcount == 0 && sorrycount == 0)
1421 ipa_passes ();
1422
1423 /* Do nothing else if any IPA pass found errors. */
1424 if (errorcount || sorrycount)
1425 {
1426 timevar_pop (TV_CGRAPHOPT);
1427 return;
1428 }
1429
1430   /* This pass removes the bodies of extern inline functions we never inlined.
1431      Do this late so other IPA passes see what is really going on.  */
1432 cgraph_remove_unreachable_nodes (false, dump_file);
1433 cgraph_global_info_ready = true;
1434 if (cgraph_dump_file)
1435 {
1436 fprintf (cgraph_dump_file, "Optimized ");
1437 dump_cgraph (cgraph_dump_file);
1438 dump_varpool (cgraph_dump_file);
1439 }
1440 if (post_ipa_mem_report)
1441 {
1442 fprintf (stderr, "Memory consumption after IPA\n");
1443 dump_memory_report (false);
1444 }
1445 timevar_pop (TV_CGRAPHOPT);
1446
1447 /* Output everything. */
1448 (*debug_hooks->assembly_start) ();
1449 if (!quiet_flag)
1450 fprintf (stderr, "Assembling functions:\n");
1451 #ifdef ENABLE_CHECKING
1452 verify_cgraph ();
1453 #endif
1454
1455 cgraph_materialize_all_clones ();
1456 cgraph_mark_functions_to_output ();
1457
1458 cgraph_state = CGRAPH_STATE_EXPANSION;
1459 if (!flag_toplevel_reorder)
1460 cgraph_output_in_order ();
1461 else
1462 {
1463 cgraph_output_pending_asms ();
1464
1465 cgraph_expand_all_functions ();
1466 varpool_remove_unreferenced_decls ();
1467
1468 varpool_assemble_pending_decls ();
1469 }
1470 cgraph_process_new_functions ();
1471 cgraph_state = CGRAPH_STATE_FINISHED;
1472
1473 if (cgraph_dump_file)
1474 {
1475 fprintf (cgraph_dump_file, "\nFinal ");
1476 dump_cgraph (cgraph_dump_file);
1477 }
1478 #ifdef ENABLE_CHECKING
1479 verify_cgraph ();
1480 /* Double check that all inline clones are gone and that all
1481 function bodies have been released from memory. */
1482 if (!(sorrycount || errorcount))
1483 {
1484 struct cgraph_node *node;
1485 bool error_found = false;
1486
1487 for (node = cgraph_nodes; node; node = node->next)
1488 if (node->analyzed
1489 && (node->global.inlined_to
1490 || gimple_has_body_p (node->decl)))
1491 {
1492 error_found = true;
1493 dump_cgraph_node (stderr, node);
1494 }
1495 if (error_found)
1496 internal_error ("nodes with unreleased memory found");
1497 }
1498 #endif
1499 }
1500
1501
1502 /* Generate and emit a static constructor or destructor. WHICH must
1503 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1504 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1505 initialization priority for this constructor or destructor. */
1506
1507 void
1508 cgraph_build_static_cdtor (char which, tree body, int priority)
1509 {
1510 static int counter = 0;
1511 char which_buf[16];
1512 tree decl, name, resdecl;
1513
1514 /* The priority is encoded in the constructor or destructor name.
1515 collect2 will sort the names and arrange that they are called at
1516 program startup. */
1517 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1518 name = get_file_function_name (which_buf);
1519
1520 decl = build_decl (input_location, FUNCTION_DECL, name,
1521 build_function_type (void_type_node, void_list_node));
1522 current_function_decl = decl;
1523
1524 resdecl = build_decl (input_location,
1525 RESULT_DECL, NULL_TREE, void_type_node);
1526 DECL_ARTIFICIAL (resdecl) = 1;
1527 DECL_RESULT (decl) = resdecl;
1528 DECL_CONTEXT (resdecl) = decl;
1529
1530 allocate_struct_function (decl, false);
1531
1532 TREE_STATIC (decl) = 1;
1533 TREE_USED (decl) = 1;
1534 DECL_ARTIFICIAL (decl) = 1;
1535 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1536 DECL_SAVED_TREE (decl) = body;
1537 TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
1538 DECL_UNINLINABLE (decl) = 1;
1539
1540 DECL_INITIAL (decl) = make_node (BLOCK);
1541 TREE_USED (DECL_INITIAL (decl)) = 1;
1542
1543 DECL_SOURCE_LOCATION (decl) = input_location;
1544 cfun->function_end_locus = input_location;
1545
1546 switch (which)
1547 {
1548 case 'I':
1549 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1550 decl_init_priority_insert (decl, priority);
1551 break;
1552 case 'D':
1553 DECL_STATIC_DESTRUCTOR (decl) = 1;
1554 decl_fini_priority_insert (decl, priority);
1555 break;
1556 default:
1557 gcc_unreachable ();
1558 }
1559
1560 gimplify_function_tree (decl);
1561
1562 cgraph_add_new_function (decl, false);
1563 cgraph_mark_needed_node (cgraph_node (decl));
1564 set_cfun (NULL);
1565 }
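/* Illustrative sketch (not part of GCC): registering a call to an
   existing FUNCTION_DECL `fn' as a static constructor with the default
   priority, mirroring how build_cdtor drives this interface.  */
#if 0
static void
register_as_constructor (tree fn)
{
  tree body = NULL_TREE;

  /* Build a GENERIC statement list containing the single call.  */
  append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
						      fn, 0),
			    &body);
  /* 'I' requests a constructor; the priority is encoded by
     cgraph_build_static_cdtor into the generated function's name.  */
  cgraph_build_static_cdtor ('I', body, DEFAULT_INIT_PRIORITY);
}
#endif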
1566
1567 void
1568 init_cgraph (void)
1569 {
1570 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1571 }
1572
1573 /* The edges representing the callers of the NEW_VERSION node were
1574    fixed by cgraph_function_versioning (); now the call_expr in their
1575    respective tree code should be updated to call the NEW_VERSION.  */
1576
1577 static void
1578 update_call_expr (struct cgraph_node *new_version)
1579 {
1580 struct cgraph_edge *e;
1581
1582 gcc_assert (new_version);
1583
1584 /* Update the call expr on the edges to call the new version. */
1585 for (e = new_version->callers; e; e = e->next_caller)
1586 {
1587 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1588 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1589 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
1590 }
1591 }
1592
1593
1594 /* Create a new cgraph node which is the new version of the
1595    OLD_VERSION node.  REDIRECT_CALLERS holds the caller
1596    edges which should be redirected to point to
1597    NEW_VERSION.  All the callee edges of OLD_VERSION
1598    are cloned to the new version node.  Return the new
1599    version node.  */
1600
1601 static struct cgraph_node *
1602 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1603 tree new_decl,
1604 VEC(cgraph_edge_p,heap) *redirect_callers)
1605 {
1606 struct cgraph_node *new_version;
1607 struct cgraph_edge *e, *new_e;
1608 struct cgraph_edge *next_callee;
1609 unsigned i;
1610
1611 gcc_assert (old_version);
1612
1613 new_version = cgraph_node (new_decl);
1614
1615 new_version->analyzed = true;
1616 new_version->local = old_version->local;
1617 new_version->global = old_version->global;
1618   new_version->rtl = old_version->rtl;
1619 new_version->reachable = true;
1620 new_version->count = old_version->count;
1621
1622 /* Clone the old node callees. Recursive calls are
1623 also cloned. */
1624 for (e = old_version->callees;e; e=e->next_callee)
1625 {
1626 new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
1627 e->lto_stmt_uid, 0, e->frequency,
1628 e->loop_nest, true);
1629 new_e->count = e->count;
1630 }
1631    /* Fix recursive calls.
1632       If OLD_VERSION has a recursive call after the
1633       previous edge cloning, the new version will have an edge
1634       pointing to the old version, which is wrong;
1635       redirect it to point to the new version.  */
1636 for (e = new_version->callees ; e; e = next_callee)
1637 {
1638 next_callee = e->next_callee;
1639 if (e->callee == old_version)
1640 cgraph_redirect_edge_callee (e, new_version);
1641
1642 if (!next_callee)
1643 break;
1644 }
1645 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1646 {
1647 /* Redirect calls to the old version node to point to its new
1648 version. */
1649 cgraph_redirect_edge_callee (e, new_version);
1650 }
1651
1652 return new_version;
1653 }
1654
1655 /* Perform function versioning.
1656    Function versioning includes copying of the tree and
1657    a callgraph update (creating a new cgraph node and updating
1658    its callees and callers).
1659 
1660    The REDIRECT_CALLERS varray includes the edges to be redirected
1661    to the new version.
1662 
1663    TREE_MAP is a mapping of tree nodes we want to replace with
1664    new ones (according to results of prior analysis).
1665    OLD_VERSION_NODE is the node that is versioned.
1666    ARGS_TO_SKIP lists arguments to be omitted from the new version.
1667    Return the new version's cgraph node.  */
1669
1670 struct cgraph_node *
1671 cgraph_function_versioning (struct cgraph_node *old_version_node,
1672 VEC(cgraph_edge_p,heap) *redirect_callers,
1673 VEC (ipa_replace_map_p,gc)* tree_map,
1674 bitmap args_to_skip)
1675 {
1676 tree old_decl = old_version_node->decl;
1677 struct cgraph_node *new_version_node = NULL;
1678 tree new_decl;
1679
1680 if (!tree_versionable_function_p (old_decl))
1681 return NULL;
1682
1683 /* Make a new FUNCTION_DECL tree node for the
1684 new version. */
1685 if (!args_to_skip)
1686 new_decl = copy_node (old_decl);
1687 else
1688 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1689
1690 /* Create the new version's call-graph node.
1691 and update the edges of the new node. */
1692 new_version_node =
1693 cgraph_copy_node_for_versioning (old_version_node, new_decl,
1694 redirect_callers);
1695
1696 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1697 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
1698
1699   /* Update the new version's properties.
1700      Make the new version visible only within this translation unit.  Make
1701      sure that it is not weak either.
1702      ??? We cannot use COMDAT linkage because there is no
1703      ABI support for this.  */
1704 DECL_EXTERNAL (new_version_node->decl) = 0;
1705 DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
1706 TREE_PUBLIC (new_version_node->decl) = 0;
1707 DECL_COMDAT (new_version_node->decl) = 0;
1708 DECL_WEAK (new_version_node->decl) = 0;
1709 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1710 new_version_node->local.externally_visible = 0;
1711 new_version_node->local.local = 1;
1712 new_version_node->lowered = true;
1713
1714 /* Update the call_expr on the edges to call the new version node. */
1715 update_call_expr (new_version_node);
1716
1717 cgraph_call_function_insertion_hooks (new_version_node);
1718 return new_version_node;
1719 }
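/* Illustrative sketch (not part of GCC): creating a specialized version
   of NODE that drops its first argument.  The interfaces used are real
   (this file and bitmap.h); the surrounding pass context is assumed.  */
#if 0
static struct cgraph_node *
make_version_without_first_arg (struct cgraph_node *node)
{
  struct cgraph_node *new_node;
  bitmap args_to_skip = BITMAP_ALLOC (NULL);

  bitmap_set_bit (args_to_skip, 0);	/* Omit argument 0.  */
  /* No caller edges are redirected and no tree replacements are made.  */
  new_node = cgraph_function_versioning (node, NULL, NULL, args_to_skip);
  BITMAP_FREE (args_to_skip);
  return new_node;
}
#endif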
1720
1721 /* Produce a separate function body for inline clones so that the offline
1722    copy can be modified without affecting them.  */
1723 struct cgraph_node *
1724 save_inline_function_body (struct cgraph_node *node)
1725 {
1726 struct cgraph_node *first_clone, *n;
1727
1728 gcc_assert (node == cgraph_node (node->decl));
1729
1730 cgraph_lower_function (node);
1731
1732 first_clone = node->clones;
1733
1734 first_clone->decl = copy_node (node->decl);
1735 cgraph_insert_node_to_hashtable (first_clone);
1736 gcc_assert (first_clone == cgraph_node (first_clone->decl));
1737 if (first_clone->next_sibling_clone)
1738 {
1739 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
1740 n->clone_of = first_clone;
1741 n->clone_of = first_clone;
1742 n->next_sibling_clone = first_clone->clones;
1743 if (first_clone->clones)
1744 first_clone->clones->prev_sibling_clone = n;
1745 first_clone->clones = first_clone->next_sibling_clone;
1746 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
1747 first_clone->next_sibling_clone = NULL;
1748 gcc_assert (!first_clone->prev_sibling_clone);
1749 }
1750 first_clone->clone_of = NULL;
1751 node->clones = NULL;
1752
1753 if (first_clone->clones)
1754 for (n = first_clone->clones; n != first_clone;)
1755 {
1756 gcc_assert (n->decl == node->decl);
1757 n->decl = first_clone->decl;
1758 if (n->clones)
1759 n = n->clones;
1760 else if (n->next_sibling_clone)
1761 n = n->next_sibling_clone;
1762 else
1763 {
1764 while (n != first_clone && !n->next_sibling_clone)
1765 n = n->clone_of;
1766 if (n != first_clone)
1767 n = n->next_sibling_clone;
1768 }
1769 }
1770
1771 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1772 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
1773
1774 DECL_EXTERNAL (first_clone->decl) = 0;
1775 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
1776 TREE_PUBLIC (first_clone->decl) = 0;
1777 DECL_COMDAT (first_clone->decl) = 0;
1778 VEC_free (ipa_opt_pass, heap,
1779 DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply);
1780 DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply = NULL;
1781
1782 #ifdef ENABLE_CHECKING
1783 verify_cgraph_node (first_clone);
1784 #endif
1785 return first_clone;
1786 }
1787
1788 /* Given a virtual clone, turn it into an actual clone.  */
1789 static void
1790 cgraph_materialize_clone (struct cgraph_node *node)
1791 {
1792 bitmap_obstack_initialize (NULL);
1793 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1794 tree_function_versioning (node->clone_of->decl, node->decl,
1795 node->clone.tree_map, true,
1796 node->clone.args_to_skip);
1797 if (cgraph_dump_file)
1798 {
1799 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
1800 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
1801 }
1802
1803   /* The function is no longer a clone.  */
1804 if (node->next_sibling_clone)
1805 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1806 if (node->prev_sibling_clone)
1807 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1808 else
1809 node->clone_of->clones = node->next_sibling_clone;
1810 node->next_sibling_clone = NULL;
1811 node->prev_sibling_clone = NULL;
1812 node->clone_of = NULL;
1813 bitmap_obstack_release (NULL);
1814 }
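/* For illustration: if NODE is a virtual clone of `int f (int a, int b)'
   with args_to_skip = {1}, materializing it copies f's body into NODE's
   decl while dropping the parameter `b', so NODE finally gets a real
   body of its own instead of sharing f's.  */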
1815
1816 /* Once all functions from the compilation unit are in memory, produce all
1817    clones and update all calls.
1818    We might also do this on demand if we don't want to bring all functions to
1819    memory prior to compilation, but the current WHOPR implementation does that
1820    and it is a bit easier to keep everything right in this order.  */
1821 void
1822 cgraph_materialize_all_clones (void)
1823 {
1824 struct cgraph_node *node;
1825 bool stabilized = false;
1826
1827 if (cgraph_dump_file)
1828 fprintf (cgraph_dump_file, "Materializing clones\n");
1829 #ifdef ENABLE_CHECKING
1830 verify_cgraph ();
1831 #endif
1832
1833   /* We could also use topological order, but the number of iterations should
1834      be bounded by the number of IPA passes, since a single IPA pass is
1835      probably not going to create clones of clones it created itself.  */
1836 while (!stabilized)
1837 {
1838 stabilized = true;
1839 for (node = cgraph_nodes; node; node = node->next)
1840 {
1841 if (node->clone_of && node->decl != node->clone_of->decl
1842 && !gimple_has_body_p (node->decl))
1843 {
1844 if (gimple_has_body_p (node->clone_of->decl))
1845 {
1846 if (cgraph_dump_file)
1847 {
1848 		  fprintf (cgraph_dump_file, "cloning %s to %s\n",
1849 cgraph_node_name (node->clone_of),
1850 cgraph_node_name (node));
1851 if (node->clone.tree_map)
1852 {
1853 unsigned int i;
1854 fprintf (cgraph_dump_file, " replace map: ");
1855 for (i = 0; i < VEC_length (ipa_replace_map_p,
1856 node->clone.tree_map);
1857 i++)
1858 {
1859 struct ipa_replace_map *replace_info;
1860 replace_info = VEC_index (ipa_replace_map_p,
1861 node->clone.tree_map,
1862 i);
1863 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
1864 fprintf (cgraph_dump_file, " -> ");
1865 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
1866 fprintf (cgraph_dump_file, "%s%s;",
1867 replace_info->replace_p ? "(replace)":"",
1868 replace_info->ref_p ? "(ref)":"");
1869 }
1870 fprintf (cgraph_dump_file, "\n");
1871 }
1872 if (node->clone.args_to_skip)
1873 {
1874 fprintf (cgraph_dump_file, " args_to_skip: ");
1875 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
1876 }
1877 		      if (node->clone.combined_args_to_skip)
1878 {
1879 fprintf (cgraph_dump_file, " combined_args_to_skip:");
1880 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
1881 }
1882 }
1883 cgraph_materialize_clone (node);
1884 }
1885 else
1886 stabilized = false;
1887 }
1888 }
1889 }
1890 if (cgraph_dump_file)
1891 fprintf (cgraph_dump_file, "Updating call sites\n");
1892 for (node = cgraph_nodes; node; node = node->next)
1893 if (node->analyzed && gimple_has_body_p (node->decl)
1894 && (!node->clone_of || node->clone_of->decl != node->decl))
1895 {
1896 struct cgraph_edge *e;
1897
1898 current_function_decl = node->decl;
1899 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1900 for (e = node->callees; e; e = e->next_callee)
1901 {
1902 tree decl = gimple_call_fndecl (e->call_stmt);
1903 	  /* When a function gets inlined, indirect inlining might have invented
1904 	     a new edge for an originally indirect stmt.  Since we are not
1905 	     preserving clones in the original form, we must not update here,
1906 	     since other inline clones don't need to contain a call to the
1907 	     same function.  The inliner will do the substitution for us later.  */
1908 if (decl && decl != e->callee->decl)
1909 {
1910 gimple new_stmt;
1911 gimple_stmt_iterator gsi;
1912
1913 if (cgraph_dump_file)
1914 {
1915 fprintf (cgraph_dump_file, "updating call of %s in %s:",
1916 cgraph_node_name (node),
1917 cgraph_node_name (e->callee));
1918 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1919 }
1920
1921 if (e->callee->clone.combined_args_to_skip)
1922 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
1923 e->callee->clone.combined_args_to_skip);
1924 else
1925 new_stmt = e->call_stmt;
1926 if (gimple_vdef (new_stmt)
1927 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1928 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1929 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1930
1931 gsi = gsi_for_stmt (e->call_stmt);
1932 gsi_replace (&gsi, new_stmt, true);
1933
1934 /* Update EH information too, just in case. */
1935 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
1936
1937 cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);
1938
1939 if (cgraph_dump_file)
1940 {
1941 fprintf (cgraph_dump_file, " updated to:");
1942 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1943 }
1944 }
1945 }
1946 pop_cfun ();
1947 current_function_decl = NULL;
1948 #ifdef ENABLE_CHECKING
1949 verify_cgraph_node (node);
1950 #endif
1951 }
1952 #ifdef ENABLE_CHECKING
1953 verify_cgraph ();
1954 #endif
1955 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
1956 }
1957
1958 #include "gt-cgraphunit.h"