/* Callgraph based interprocedural optimizations.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module implements the main driver of the compilation process as
   well as a few basic interprocedural optimizers.

   The main scope of this file is to act as an interface between the
   tree-based front ends and the back end (and middle end).

   The front end is supposed to use the following functionality:

    - cgraph_finalize_function

      This function is called once the front end has parsed the whole
      body of a function and it is certain that neither the function body
      nor the declaration will change.

      (There is one exception needed for implementing the GCC extern
      inline extension.)

    - varpool_finalize_variable

      This function has the same behavior as the above but is used for
      static variables.

    - cgraph_finalize_compilation_unit

      This function is called once the (source level) compilation unit
      is finalized and it will no longer change.

      The call-graph construction and local function analysis take place
      here.  Bodies of unreachable functions are released to conserve
      memory usage.

      The function can be called multiple times when multiple source
      level compilation units are combined (such as in the C front end).

    - cgraph_optimize

      In this unit-at-a-time compilation the intraprocedural analysis
      takes place here.  In particular the static functions whose address
      is never taken are marked as local.  The back end can then use this
      information to modify calling conventions, do better inlining, or
      perform similar optimizations.

    - cgraph_mark_needed_node
    - varpool_mark_needed_node

      When a function or variable is referenced in some hidden way, the
      call-graph data structure must be updated accordingly by this
      function.  There should be little need to call this function and
      all the references should be made explicit to the cgraph code.
      At present these functions are used by the C++ front end to
      explicitly mark the keyed methods.

    - analyze_expr callback

      This function is responsible for lowering tree nodes not understood
      by generic code into understandable ones or alternatively marking
      callgraph and varpool nodes referenced by them as needed.

   ??? On the tree-ssa side, genericizing should take place here and we
   would avoid the need for these hooks (replacing them by a genericizing
   hook).

   Analysis of all functions is deferred to
   cgraph_finalize_compilation_unit and expansion to cgraph_optimize.

   In cgraph_finalize_compilation_unit the reachable functions are
   analyzed.  During analysis the call-graph edges from reachable
   functions are constructed and their destinations are marked as
   reachable.  References to functions and variables are discovered too,
   and variables found to be needed are output to the assembly file.
   Via the mark_referenced call in assemble_variable, functions
   referenced by static variables are noticed too.

   The intraprocedural information is produced and its existence is
   indicated by global_info_ready.  Once this flag is set it is
   impossible to change a function from !reachable to reachable and thus
   assemble_variable no longer calls mark_referenced.

   Finally the call-graph is topologically sorted and all reachable
   functions that have not been completely inlined or are not external
   are output.

   ??? It is possible that a reference to a function or variable is
   optimized out.  We cannot deal with this nicely because topological
   order is not suitable for it.  For tree-ssa we may consider another
   pass doing optimization and re-discovering reachable functions.

   ??? Reorganize code so variables are output very last and only if they
   really have been referenced by produced code, so we catch more cases
   where a reference has been optimized out.  */
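
/* As an illustration only -- a minimal sketch of the expected driver
   sequence, where frontend_parse_next_function is a hypothetical
   front-end routine, not part of GCC:

       tree fndecl;
       while ((fndecl = frontend_parse_next_function ()) != NULL_TREE)
         cgraph_finalize_function (fndecl, false);
       cgraph_finalize_compilation_unit ();

   cgraph_finalize_compilation_unit then analyzes the whole unit and
   finally calls cgraph_optimize, which drives expansion of all
   reachable functions.  */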


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "debug.h"
#include "target.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
#include "intl.h"
#include "function.h"
#include "ipa-prop.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-dump.h"
#include "output.h"
#include "coverage.h"

static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
static void cgraph_analyze_function (struct cgraph_node *);

static FILE *cgraph_dump_file;

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static GTY (()) VEC(tree, gc) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static GTY (()) VEC(tree, gc) *static_dtors;

/* When the target does not have ctors and dtors, we call all
   constructors and destructors through a special
   initialization/destruction function recognized by collect2.

   When we are going to build this function, collect all constructors
   and destructors and turn them into normal functions.  */

static void
record_cdtor_fn (tree fndecl)
{
  struct cgraph_node *node;
  if (targetm.have_ctors_dtors
      || (!DECL_STATIC_CONSTRUCTOR (fndecl)
          && !DECL_STATIC_DESTRUCTOR (fndecl)))
    return;

  if (DECL_STATIC_CONSTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_ctors, fndecl);
      DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
    }
  if (DECL_STATIC_DESTRUCTOR (fndecl))
    {
      VEC_safe_push (tree, gc, static_dtors, fndecl);
      DECL_STATIC_DESTRUCTOR (fndecl) = 0;
    }
  node = cgraph_node (fndecl);
  node->local.disregard_inline_limits = 1;
  cgraph_mark_reachable_node (node);
}

/* Define global constructor/destructor functions for the CDTORS, of
   which there are LEN.  The CDTORS are sorted by initialization
   priority.  If CTOR_P is true, these are constructors; otherwise,
   they are destructors.  */

static void
build_cdtor (bool ctor_p, tree *cdtors, size_t len)
{
  size_t i;

  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      /* Find the next batch of constructors/destructors with the same
         initialization priority.  */
      do
        {
          priority_type p;
          fn = cdtors[i];
          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
          if (!body)
            priority = p;
          else if (p != priority)
            break;
          append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
                                                              fn, 0),
                                    &body);
          ++i;
        }
      while (i < len);
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the functions of like
         priority.  */
      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
    }
}
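
/* For example (illustrative only), if CDTORS holds three constructors
   with priorities { 100, 100, 65535 }, the loop above emits two
   synthesized functions: one of priority 100 calling the first two
   constructors in order, and one of priority 65535 calling the third.
   In C terms the effect is roughly:

       static void ctor_batch_100 (void)   { ctor_a (); ctor_b (); }
       static void ctor_batch_65535 (void) { ctor_c (); }

   where ctor_a/ctor_b/ctor_c stand for the collected FUNCTION_DECLs;
   the real names and the registration with collect2 are produced by
   cgraph_build_static_cdtor below.  */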

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return (const tree *)p1 - (const tree *)p2;
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.  */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return (const tree *)p1 - (const tree *)p2;
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
cgraph_build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_ctors),
             VEC_length (tree, static_ctors),
             sizeof (tree),
             compare_ctor);
      build_cdtor (/*ctor_p=*/true,
                   VEC_address (tree, static_ctors),
                   VEC_length (tree, static_ctors));
      VEC_truncate (tree, static_ctors, 0);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors);
      qsort (VEC_address (tree, static_dtors),
             VEC_length (tree, static_dtors),
             sizeof (tree),
             compare_dtor);
      build_cdtor (/*ctor_p=*/false,
                   VEC_address (tree, static_dtors),
                   VEC_length (tree, static_dtors));
      VEC_truncate (tree, static_dtors, 0);
    }
}

/* Determine if function DECL is needed.  That is, visible to something
   either outside this translation unit or to something magic in the
   system configury.  */

bool
cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
  if (MAIN_NAME_P (DECL_NAME (decl))
      && TREE_PUBLIC (decl))
    {
      node->local.externally_visible = true;
      return true;
    }

  /* If the user told us it is used, then it must be so.  */
  if (node->local.externally_visible)
    return true;

  /* ??? If the assembler name is set by hand, it is possible to assemble
     the name later, after finalizing the function, and the fact is then
     noticed in assemble_name.  This is arguably a bug.  */
  if (DECL_ASSEMBLER_NAME_SET_P (decl)
      && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
    return true;

  /* With -fkeep-inline-functions we are keeping all inline functions
     except for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
    return true;

  /* If we decided it was needed before, but at the time we didn't have
     the body of the function available, then it's still needed.  We have
     to go back and re-check its dependencies now.  */
  if (node->needed)
    return true;

  /* Externally visible functions must be output.  The exception is
     COMDAT functions that must be output only when they are needed.

     When not optimizing, also output the static functions (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if (((TREE_PUBLIC (decl)
        || (!optimize && !node->local.disregard_inline_limits
            && !DECL_DECLARED_INLINE_P (decl)
            && !node->origin))
       && !flag_whole_program)
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    return true;

  /* Constructors and destructors are reachable from the runtime by
     some mechanism.  */
  if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
    return true;

  return false;
}
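
/* For instance (illustrative only): a TREE_PUBLIC function such as a
   non-static `int foo (void)' is needed because other translation
   units may call it, whereas a `static' helper whose address is never
   taken and that has no callers fails all the tests above, so its
   body can be reclaimed once it is known to be unreachable.  */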

/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
   these functions to the callgraph in a way so they look like ordinary
   reachable functions inserted into the callgraph already at
   construction time.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /* Note that this queue may grow as it is being processed, as the new
     functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      switch (cgraph_state)
        {
        case CGRAPH_STATE_CONSTRUCTION:
          /* At construction time we just need to finalize the function
             and move it into the reachable functions list.  */

          node->next_needed = NULL;
          cgraph_finalize_function (fndecl, false);
          cgraph_mark_reachable_node (node);
          output = true;
          break;

        case CGRAPH_STATE_IPA:
        case CGRAPH_STATE_IPA_SSA:
          /* When IPA optimization has already started, do all essential
             transformations that have already been performed on the whole
             cgraph but not on this function.  */

          gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
          current_function_decl = fndecl;
          compute_inline_parameters (node);
          if ((cgraph_state == CGRAPH_STATE_IPA_SSA
               && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              /* When not optimizing, be sure we run early local passes anyway
                 to expand OMP.  */
              || !optimize)
            execute_pass_list (pass_early_local_passes.pass.sub);
          free_dominance_info (CDI_POST_DOMINATORS);
          free_dominance_info (CDI_DOMINATORS);
          pop_cfun ();
          current_function_decl = NULL;
          break;

        case CGRAPH_STATE_EXPANSION:
          /* Functions created during expansion shall be compiled
             directly.  */
          node->process = 0;
          cgraph_expand_function (node);
          break;

        default:
          gcc_unreachable ();
          break;
        }
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}

/* As a GCC extension we allow redefinition of the function.  The
   semantics when the two copies of the body differ is not well defined.
   We replace the old body with the new body so in unit-at-a-time mode
   we always use the new body, while in normal mode we may end up with
   the old body inlined into some functions and the new body expanded
   and inlined in others.

   ??? It may make more sense to use one body for inlining and the other
   body for expanding the function, but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.redefined_extern_inline = true;
  node->local.finalized = false;

  cgraph_node_remove_callees (node);

  /* We may need to re-queue the node for assembling in case
     we already processed it and ignored it as not needed or got
     a re-declaration in IMA mode.  */
  if (node->reachable)
    {
      struct cgraph_node *n;

      for (n = cgraph_nodes_queue; n; n = n->next_needed)
        if (n == node)
          break;
      if (!n)
        node->reachable = 0;
    }
}
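
/* A minimal sketch of the situation handled above, using the GNU
   extern inline extension (illustrative only):

       extern inline int f (void) { return 0; }   -- first body
       int f (void) { return 1; }                 -- redefinition

   When the second definition is finalized, cgraph_reset_node throws
   away the analysis of the first body so the function can be analyzed
   afresh with the new one.  */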

static void
cgraph_lower_function (struct cgraph_node *node)
{
  if (node->lowered)
    return;

  if (node->nested)
    lower_nested_functions (node->decl);
  gcc_assert (!node->nested);

  tree_lowering_passes (node->decl);
  node->lowered = true;
}

/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;
  record_cdtor_fn (node->decl);

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!nested)
    ggc_collect ();
}

/* The C99 extern inline keywords allow changing of a declaration after
   the function has been finalized.  We then need to re-decide whether
   we want to mark the function as needed.  */

void
cgraph_mark_if_needed (tree decl)
{
  struct cgraph_node *node = cgraph_node (decl);
  if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);
}

/* Return TRUE if NODE2 is NODE or a (possibly indirect) clone of it.  */
static bool
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
  while (node != node2 && node2)
    node2 = node2->clone_of;
  return node2 != NULL;
}

/* Verify the consistency of cgraph node NODE.  */
void
verify_cgraph_node (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
  struct function *saved_cfun = cfun;
  basic_block this_block;
  gimple_stmt_iterator gsi;
  bool error_found = false;

  if (errorcount || sorrycount)
    return;

  timevar_push (TV_CGRAPH_VERIFY);
  /* debug_generic_stmt needs the correct cfun.  */
  set_cfun (this_cfun);
  for (e = node->callees; e; e = e->next_callee)
    if (e->aux)
      {
        error ("aux field set for edge %s->%s",
               identifier_to_locale (cgraph_node_name (e->caller)),
               identifier_to_locale (cgraph_node_name (e->callee)));
        error_found = true;
      }
  if (node->count < 0)
    {
      error ("Execution count is negative");
      error_found = true;
    }
  for (e = node->callers; e; e = e->next_caller)
    {
      if (e->count < 0)
        {
          error ("caller edge count is negative");
          error_found = true;
        }
      if (e->frequency < 0)
        {
          error ("caller edge frequency is negative");
          error_found = true;
        }
      if (e->frequency > CGRAPH_FREQ_MAX)
        {
          error ("caller edge frequency is too large");
          error_found = true;
        }
      if (!e->inline_failed)
        {
          if (node->global.inlined_to
              != (e->caller->global.inlined_to
                  ? e->caller->global.inlined_to : e->caller))
            {
              error ("inlined_to pointer is wrong");
              error_found = true;
            }
          if (node->callers->next_caller)
            {
              error ("multiple inline callers");
              error_found = true;
            }
        }
      else
        if (node->global.inlined_to)
          {
            error ("inlined_to pointer set for noninline callers");
            error_found = true;
          }
    }
  if (!node->callers && node->global.inlined_to)
    {
      error ("inlined_to pointer is set but no predecessors found");
      error_found = true;
    }
  if (node->global.inlined_to == node)
    {
      error ("inlined_to pointer refers to itself");
      error_found = true;
    }

  if (!cgraph_node (node->decl))
    {
      error ("node not found in cgraph_hash");
      error_found = true;
    }

  if (node->clone_of)
    {
      struct cgraph_node *n;
      for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
        if (n == node)
          break;
      if (!n)
        {
          error ("node has wrong clone_of");
          error_found = true;
        }
    }
  if (node->clones)
    {
      struct cgraph_node *n;
      for (n = node->clones; n; n = n->next_sibling_clone)
        if (n->clone_of != node)
          break;
      if (n)
        {
          error ("node has wrong clone list");
          error_found = true;
        }
    }
  if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
    {
      error ("node is in clone list but it is not clone");
      error_found = true;
    }
  if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
    {
      error ("node has wrong prev_clone pointer");
      error_found = true;
    }
  if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
    {
      error ("double linked list of clones corrupted");
      error_found = true;
    }

  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
    {
      if (this_cfun->cfg)
        {
          /* The nodes we're interested in are never shared, so walk
             the tree ignoring duplicates.  */
          struct pointer_set_t *visited_nodes = pointer_set_create ();
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
            for (gsi = gsi_start_bb (this_block);
                 !gsi_end_p (gsi);
                 gsi_next (&gsi))
              {
                gimple stmt = gsi_stmt (gsi);
                tree decl;
                if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    if (e)
                      {
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
                            debug_gimple_stmt (stmt);
                            error_found = true;
                          }
                        if (!clone_of_p (cgraph_node (decl), e->callee)
                            && !e->callee->global.inlined_to)
                          {
                            error ("edge points to wrong declaration:");
                            debug_tree (e->callee->decl);
                            fprintf (stderr," Instead of:");
                            debug_tree (decl);
                          }
                        e->aux = (void *)1;
                      }
                    else
                      {
                        error ("missing callgraph edge for call stmt:");
                        debug_gimple_stmt (stmt);
                        error_found = true;
                      }
                  }
              }
          pointer_set_destroy (visited_nodes);
        }
      else
        /* No CFG available?!  */
        gcc_unreachable ();

      for (e = node->callees; e; e = e->next_callee)
        {
          if (!e->aux && !e->indirect_call)
            {
              error ("edge %s->%s has no corresponding call_stmt",
                     identifier_to_locale (cgraph_node_name (e->caller)),
                     identifier_to_locale (cgraph_node_name (e->callee)));
              debug_gimple_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
        }
    }
  if (error_found)
    {
      dump_cgraph_node (stderr, node);
      internal_error ("verify_cgraph_node failed");
    }
  set_cfun (saved_cfun);
  timevar_pop (TV_CGRAPH_VERIFY);
}

/* Verify the whole cgraph structure.  */
void
verify_cgraph (void)
{
  struct cgraph_node *node;

  if (sorrycount || errorcount)
    return;

  for (node = cgraph_nodes; node; node = node->next)
    verify_cgraph_node (node);
}

/* Output all asm statements we have stored up to be output.  */

static void
cgraph_output_pending_asms (void)
{
  struct cgraph_asm_node *can;

  if (errorcount || sorrycount)
    return;

  for (can = cgraph_asm_nodes; can; can = can->next)
    assemble_asm (can->asm_str);
  cgraph_asm_nodes = NULL;
}

/* Analyze the function scheduled to be output.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* Make sure to gimplify bodies only once.  While analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}

/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

     void f();
     void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

     void f() {}
     void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
        {
          mark_decl_referenced (decl);
          if (node->local.finalized)
            cgraph_mark_needed_node (node);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (node->decl))
            warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute has effect only on public objects");
          else
            {
              if (node->local.finalized)
                cgraph_mark_needed_node (node);
              node->local.externally_visible = true;
            }
        }
    }
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
        {
          mark_decl_referenced (decl);
          if (vnode->finalized)
            varpool_mark_needed_node (vnode);
        }
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
        {
          if (! TREE_PUBLIC (vnode->decl))
            warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
                        "%<externally_visible%>"
                        " attribute has effect only on public objects");
          else
            {
              if (vnode->finalized)
                varpool_mark_needed_node (vnode);
              vnode->externally_visible = true;
            }
        }
    }
}
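
/* Illustrative example: given

       static __attribute__((used)) int counter;
       static __attribute__((used)) void helper (void) {}

   both the variable and the function are marked referenced and needed
   by the walk above, so they are emitted even though nothing in the
   translation unit refers to them.  */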

/* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
   transitively each reachable function) and build the cgraph.
   The function can be called multiple times after inserting new nodes
   into the beginning of the queue.  Just the new part of the queue is
   re-scanned then.  */

static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  process_function_and_variable_attributes (first_processed,
                                            first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate the reachability flag and lower the representation of all
     reachable functions.  In the future, lowering will introduce new
     functions and new entry points on the way (by template instantiation
     and virtual method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create an extern inline function and later
         use the weak alias attribute to kill its body.  See
         gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
        {
          cgraph_reset_node (node);
          continue;
        }

      if (!node->analyzed)
        cgraph_analyze_function (node);

      for (edge = node->callees; edge; edge = edge->next_callee)
        if (!edge->callee->reachable)
          cgraph_mark_reachable_node (edge->callee);

      /* If decl is a clone of an abstract function, mark that abstract
         function so that we don't release its body.  The DECL_INITIAL() of
         that abstract function declaration will later be needed to output
         debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
        {
          struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
          origin_node->abstract_and_needed = true;
        }

      /* We finalize local static variables while constructing the callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
                                                first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
        if (node->needed)
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
        cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
          cgraph_remove_node (node);
          continue;
        }
      else
        node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
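
/* A small worked example of the propagation above (illustrative only):
   given

       static void g (void) {}
       static void h (void) {}
       int main (void) { g (); return 0; }

   main is an entry point, so it is queued; analyzing it creates the
   edge main->g and marks g reachable, so g is analyzed in turn.  h is
   never marked reachable, so the reclaim loop at the end removes its
   node and releases its body.  */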


/* Emit thunks for every node in the cgraph.
   FIXME: We really ought to emit thunks only for functions that are needed.  */

static void
cgraph_emit_thunks (void)
{
  struct cgraph_node *n;

  for (n = cgraph_nodes; n; n = n->next)
    {
      /* Only emit thunks on functions defined in this TU.
         Note that this may emit more thunks than strictly necessary.
         During optimization some nodes may disappear.  It would be
         nice to emit thunks only for the functions that will be
         emitted, but we cannot know that until the inliner and other
         IPA passes have run (see the sequencing of the call to
         cgraph_mark_functions_to_output in cgraph_optimize).  */
      if (n->reachable
          && !DECL_EXTERNAL (n->decl))
        lang_hooks.callgraph.emit_associated_thunks (n->decl);
    }
}


/* Analyze the whole compilation unit once it is parsed completely.  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors; we would
     otherwise miss diagnostics for the following functions.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Emit thunks for reachable nodes, if needed.  */
  if (lang_hooks.callgraph.emit_associated_thunks)
    cgraph_emit_thunks ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}


/* Figure out what functions we want to assemble.  */

static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      gcc_assert (!node->process);

      for (e = node->callers; e; e = e->next_caller)
        if (e->inline_failed)
          break;

      /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
      if (node->analyzed
          && !node->global.inlined_to
          && (node->needed
              || (e && node->reachable))
          && !TREE_ASM_WRITTEN (decl)
          && !DECL_EXTERNAL (decl))
        node->process = 1;
      else
        {
          /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
          if (!node->global.inlined_to
              && gimple_has_body_p (decl)
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
#endif
          gcc_assert (node->global.inlined_to
                      || !gimple_has_body_p (decl)
                      || DECL_EXTERNAL (decl));
        }
    }
}

/* Expand the function specified by NODE.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that the back end didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}

/* Return true when the call on edge E will be inlined; set *REASON to
   the current inline_failed reason (zero when inlining succeeds).  */

bool
cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
{
  *reason = e->inline_failed;
  return !e->inline_failed;
}



/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so a function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
cgraph_expand_all_functions (void)
{
  struct cgraph_node *node;
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = cgraph_postorder (order);
  gcc_assert (order_pos == cgraph_n_nodes);

  /* The garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];
      if (node->process)
        {
          gcc_assert (node->reachable);
          node->process = 0;
          cgraph_expand_function (node);
        }
    }
  cgraph_process_new_functions ();

  free (order);
}

/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,
  ORDER_FUNCTION,
  ORDER_VAR,
  ORDER_ASM
};

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;
    struct varpool_node *v;
    struct cgraph_asm_node *a;
  } u;
};

/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  size_t size;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  max = cgraph_order;
  size = max * sizeof (struct cgraph_order_sort);
  nodes = (struct cgraph_order_sort *) alloca (size);
  memset (nodes, 0, size);

  varpool_analyze_pending_decls ();

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
        {
          i = pf->order;
          gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
          nodes[i].kind = ORDER_FUNCTION;
          nodes[i].u.f = pf;
        }
    }

  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In -fno-toplevel-reorder mode we output all statics; mark them as
     needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
          varpool_mark_needed_node (nodes[i].u.v);
        }
    }
  varpool_empty_needed_queue ();

  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
        {
        case ORDER_FUNCTION:
          nodes[i].u.f->process = 0;
          cgraph_expand_function (nodes[i].u.f);
          break;

        case ORDER_VAR:
          varpool_assemble_decl (nodes[i].u.v);
          break;

        case ORDER_ASM:
          assemble_asm (nodes[i].u.a->asm_str);
          break;

        case ORDER_UNDEFINED:
          break;

        default:
          gcc_unreachable ();
        }
    }

  cgraph_asm_nodes = NULL;
}
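
/* For example (illustrative only), given a translation unit containing

       int x = 1;
       asm ("# marker");
       void f (void) {}

   compiling with -fno-toplevel-reorder makes the routine above emit x,
   the toplevel asm, and f in exactly this source order, keyed by their
   order fields, instead of the usual reordered output.  */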

/* Return true when the function body of DECL still needs to be kept
   around for later re-use.  */
bool
cgraph_preserve_function_body_p (tree decl)
{
  struct cgraph_node *node;

  gcc_assert (cgraph_global_info_ready);
  /* Look if there is any clone around.  */
  node = cgraph_node (decl);
  if (node->clones)
    return true;
  return false;
}

static void
ipa_passes (void)
{
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();
    }

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  execute_ipa_pass_list (all_regular_ipa_passes);

  bitmap_obstack_release (NULL);
}


/* Perform simple optimizations based on the callgraph.  */

void
cgraph_optimize (void)
{
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* The front end may output common variables after the unit has been
     finalized.  It is safe to deal with them here as they are always
     zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there were any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    return;

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
        if (node->analyzed
            && (node->global.inlined_to
                || gimple_has_body_p (node->decl)))
          {
            error_found = true;
            dump_cgraph_node (stderr, node);
          }
      if (error_found)
        internal_error ("nodes with unreleased memory found");
    }
#endif
}


/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.  */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  static int counter = 0;
  char which_buf[16];
  tree decl, name, resdecl;

  /* The priority is encoded in the constructor or destructor name.
     collect2 will sort the names and arrange that they are called at
     program startup.  */
  sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  decl = build_decl (input_location, FUNCTION_DECL, name,
                     build_function_type (void_type_node, void_list_node));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
                        RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);
  cgraph_mark_needed_node (cgraph_node (decl));
  set_cfun (NULL);
}
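
/* For instance, a call cgraph_build_static_cdtor ('I', body, 65535)
   produces a function whose name encodes "I_65535_0"; the exact
   decorated form comes from get_file_function_name and is target and
   file dependent, so this sketch is illustrative only.  collect2
   scans for such magic names and synthesizes a table that runs the
   constructors at startup in priority order.  */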

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call NEW_VERSION.  */

static void
update_call_expr (struct cgraph_node *new_version)
{
  struct cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}


/* Create a new cgraph node which is the new version of
   the OLD_VERSION node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of OLD_VERSION
   are cloned to the new version node.  Return the new
   version node.  */

static struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
                                 tree new_decl,
                                 VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_version;
  struct cgraph_edge *e, *new_e;
  struct cgraph_edge *next_callee;
  unsigned i;

  gcc_assert (old_version);

  new_version = cgraph_node (new_decl);

  new_version->analyzed = true;
  new_version->local = old_version->local;
  new_version->global = old_version->global;
  new_version->rtl = old_version->rtl;
  new_version->reachable = true;
  new_version->count = old_version->count;

  /* Clone the old node's callees.  Recursive calls are
     also cloned.  */
  for (e = old_version->callees; e; e = e->next_callee)
    {
      new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
                                 e->lto_stmt_uid, 0, e->frequency,
                                 e->loop_nest, true);
      new_e->count = e->count;
    }
  /* Fix recursive calls.
     If OLD_VERSION has a recursive call after the
     previous edge cloning, the new version will have an edge
     pointing to the old version, which is wrong;
     redirect it to point to the new version.  */
  for (e = new_version->callees ; e; e = next_callee)
    {
      next_callee = e->next_callee;
      if (e->callee == old_version)
        cgraph_redirect_edge_callee (e, new_version);

      if (!next_callee)
        break;
    }
  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      cgraph_redirect_edge_callee (e, new_version);
    }

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   ARGS_TO_SKIP lists arguments to be omitted from the function.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
                            VEC(cgraph_edge_p,heap) *redirect_callers,
                            VEC (ipa_replace_map_p,gc)* tree_map,
                            bitmap args_to_skip)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
                                     redirect_callers);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_version_node->decl) = 0;
  DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
  TREE_PUBLIC (new_version_node->decl) = 0;
  DECL_COMDAT (new_version_node->decl) = 0;
  DECL_WEAK (new_version_node->decl) = 0;
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
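
/* A minimal usage sketch (illustrative only; `node' is assumed to be
   the cgraph node of some function f): to create a private version of
   f with its first argument dropped, collect the caller edges that
   should switch to the new version and pass a bitmap of skipped
   argument indices:

       VEC(cgraph_edge_p,heap) *callers = ...;   -- edges to redirect
       bitmap skip = BITMAP_ALLOC (NULL);
       bitmap_set_bit (skip, 0);                 -- drop argument 0
       struct cgraph_node *clone
         = cgraph_function_versioning (node, callers, NULL, skip);

   The IPA constant-propagation code uses this interface in a similar
   way when it specializes functions.  */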

/* Produce a separate function body for inline clones so the offline
   copy can be modified without affecting them.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply);
  DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}

/* Given a virtual clone, turn it into an actual clone.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from the compilation unit are in memory, produce
   all clones and update all calls.  We might also do this on demand if
   we don't want to bring all functions to memory prior to compilation,
   but the current WHOPR implementation does that and it is a bit easier
   to keep everything right in this order.  */
void
cgraph_materialize_all_clones (void)
{
  struct cgraph_node *node;
  bool stabilized = false;

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* We can also do topological order, but the number of iterations should
     be bounded by the number of IPA passes since a single IPA pass is
     probably not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      for (node = cgraph_nodes; node; node = node->next)
        {
          if (node->clone_of && node->decl != node->clone_of->decl
              && !gimple_has_body_p (node->decl))
            {
              if (gimple_has_body_p (node->clone_of->decl))
                {
                  if (cgraph_dump_file)
                    {
                      fprintf (cgraph_dump_file, "cloning %s to %s\n",
                               cgraph_node_name (node->clone_of),
                               cgraph_node_name (node));
                      if (node->clone.tree_map)
                        {
                          unsigned int i;
                          fprintf (cgraph_dump_file, "   replace map: ");
                          for (i = 0; i < VEC_length (ipa_replace_map_p,
                                                      node->clone.tree_map);
                               i++)
                            {
                              struct ipa_replace_map *replace_info;
                              replace_info = VEC_index (ipa_replace_map_p,
                                                        node->clone.tree_map,
                                                        i);
                              print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
                              fprintf (cgraph_dump_file, " -> ");
                              print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
                              fprintf (cgraph_dump_file, "%s%s;",
                                       replace_info->replace_p ? "(replace)":"",
                                       replace_info->ref_p ? "(ref)":"");
                            }
                          fprintf (cgraph_dump_file, "\n");
                        }
                      if (node->clone.args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   args_to_skip: ");
                          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
                        }
                      if (node->clone.combined_args_to_skip)
                        {
                          fprintf (cgraph_dump_file, "   combined_args_to_skip:");
                          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
                        }
                    }
                  cgraph_materialize_clone (node);
                }
              else
                stabilized = false;
            }
        }
    }
  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "Updating call sites\n");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && gimple_has_body_p (node->decl)
        && (!node->clone_of || node->clone_of->decl != node->decl))
      {
        struct cgraph_edge *e;

        current_function_decl = node->decl;
        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
        for (e = node->callees; e; e = e->next_callee)
          {
            tree decl = gimple_call_fndecl (e->call_stmt);
            /* When a function gets inlined, indirect inlining might've
               invented a new edge for an originally indirect stmt.  Since
               we are not preserving clones in the original form, we must
               not update here since other inline clones don't need to
               contain a call to the same call.  The inliner will do the
               substitution for us later.  */
            if (decl && decl != e->callee->decl)
              {
                gimple new_stmt;
                gimple_stmt_iterator gsi;

                if (cgraph_dump_file)
                  {
                    fprintf (cgraph_dump_file, "updating call of %s in %s:",
                             cgraph_node_name (e->callee),
                             cgraph_node_name (node));
                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
                  }

                if (e->callee->clone.combined_args_to_skip)
                  new_stmt = gimple_call_copy_skip_args (e->call_stmt,
                                                         e->callee->clone.combined_args_to_skip);
                else
                  new_stmt = e->call_stmt;
                if (gimple_vdef (new_stmt)
                    && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                gimple_call_set_fndecl (new_stmt, e->callee->decl);

                gsi = gsi_for_stmt (e->call_stmt);
                gsi_replace (&gsi, new_stmt, true);

                /* Update EH information too, just in case.  */
                maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);

                cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);

                if (cgraph_dump_file)
                  {
                    fprintf (cgraph_dump_file, "  updated to:");
                    print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
                  }
              }
          }
        pop_cfun ();
        current_function_decl = NULL;
#ifdef ENABLE_CHECKING
        verify_cgraph_node (node);
#endif
      }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}

#include "gt-cgraphunit.h"