1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
 22 /* This module implements the main driver of the compilation process as well as
 23 a few basic interprocedural optimizers.
 24
 25 The main purpose of this file is to act as an interface between
 26 the tree-based front ends and the back end (and middle end).
 27
 28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
 32 This function is called once the front end has parsed the whole function
 33 body and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
 40 This function has the same behavior as the above but is used for static
 41 variables.
42
43 - cgraph_finalize_compilation_unit
44
 45 This function is called once the (source-level) compilation unit is
 46 finalized and will no longer change.
47
 48 The call-graph construction and local function
 49 analysis take place here. Bodies of unreachable functions are released
 50 to conserve memory usage.
51
 52 The function can be called multiple times when multiple source-level
 53 compilation units are combined (such as in the C front end).
54
55 - cgraph_optimize
56
 57 In this unit-at-a-time compilation the intraprocedural analysis takes
 58 place here. In particular, static functions whose address is never
 59 taken are marked as local. The back end can then use this information to
 60 modify calling conventions, do better inlining, or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
 65 When a function or variable is referenced in some hidden way, the call-graph
 66 data structure must be updated accordingly by this function.
 67 There should be little need to call this function, and all the references
 68 should be made explicit to the cgraph code. At present these functions are
 69 used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
 73 This function is responsible for lowering tree nodes not understood by
 74 generic code into understandable ones or alternatively marking
 75 callgraph and varpool nodes referenced by them as needed.
76
 77 ??? On the tree-ssa branch, genericizing should take place here and we would
 78 avoid the need for these hooks (replacing them by a genericizing hook).
79
 80 Analysis of all functions is deferred
 81 to cgraph_finalize_compilation_unit, and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
 86 reachable. References to functions and variables are discovered too,
 87 and variables found to be needed are output to the assembly file. Via
 88 the mark_referenced call in assemble_variable, functions referenced by
 89 static variables are noticed too.
90
 91 The intra-procedural information is produced and its existence is
 92 indicated by global_info_ready. Once this flag is set it is impossible
 93 to change a function from !reachable to reachable, and thus
 94 assemble_variable no longer calls mark_referenced.
95
 96 Finally the call-graph is topologically sorted and all reachable functions
 97 that have not been completely inlined and are not external are output.
98
 99 ??? It is possible that a reference to a function or variable is optimized
 100 out. We cannot deal with this nicely because the topological order is not
 101 suitable for it. For tree-ssa we may consider another pass doing
 102 optimization and re-discovering reachable functions.
103
 104 ??? Reorganize the code so variables are output last, and only if they
 105 really have been referenced by the produced code, so we catch more cases
 106 where the reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "intl.h"
130 #include "function.h"
131 #include "ipa-prop.h"
132 #include "gimple.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "tree-dump.h"
136 #include "output.h"
137 #include "coverage.h"
138 #include "plugin.h"
139
140 static void cgraph_expand_all_functions (void);
141 static void cgraph_mark_functions_to_output (void);
142 static void cgraph_expand_function (struct cgraph_node *);
143 static void cgraph_output_pending_asms (void);
144 static void cgraph_analyze_function (struct cgraph_node *);
145
146 static FILE *cgraph_dump_file;
147
148 /* A vector of FUNCTION_DECLs declared as static constructors. */
149 static GTY (()) VEC(tree, gc) *static_ctors;
150 /* A vector of FUNCTION_DECLs declared as static destructors. */
151 static GTY (()) VEC(tree, gc) *static_dtors;
152
153 /* Used for vtable lookup in thunk adjusting. */
154 static GTY (()) tree vtable_entry_type;
155
 156 /* When the target does not have ctors and dtors, we call all constructors
 157 and destructors through special initialization/destruction functions
 158 recognized by collect2.
 159
 160 When we are going to build such a function, we collect all constructors
 161 and destructors and turn them into normal functions. */
162
163 static void
164 record_cdtor_fn (tree fndecl)
165 {
166 struct cgraph_node *node;
167 if (targetm.have_ctors_dtors
168 || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169 && !DECL_STATIC_DESTRUCTOR (fndecl)))
170 return;
171
172 if (DECL_STATIC_CONSTRUCTOR (fndecl))
173 {
174 VEC_safe_push (tree, gc, static_ctors, fndecl);
175 DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
176 }
177 if (DECL_STATIC_DESTRUCTOR (fndecl))
178 {
179 VEC_safe_push (tree, gc, static_dtors, fndecl);
180 DECL_STATIC_DESTRUCTOR (fndecl) = 0;
181 }
182 node = cgraph_node (fndecl);
183 node->local.disregard_inline_limits = 1;
184 cgraph_mark_reachable_node (node);
185 }
186
 187 /* Define global constructor/destructor functions for the CDTORS, of
 188 which there are LEN. The CDTORS are sorted by initialization
 189 priority. If CTOR_P is true, these are constructors; otherwise,
 190 they are destructors. */
191
192 static void
193 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
194 {
195 size_t i;
196
197 i = 0;
198 while (i < len)
199 {
200 tree body;
201 tree fn;
202 priority_type priority;
203
204 priority = 0;
205 body = NULL_TREE;
206 /* Find the next batch of constructors/destructors with the same
207 initialization priority. */
208 do
209 {
210 priority_type p;
211 fn = cdtors[i];
212 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
213 if (!body)
214 priority = p;
215 else if (p != priority)
216 break;
217 append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
218 fn, 0),
219 &body);
220 ++i;
221 }
222 while (i < len);
223 gcc_assert (body != NULL_TREE);
 224 /* Generate a function to call all the functions of like
 225 priority. */
226 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
227 }
228 }
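
/* Illustrative only: how the loop above batches by priority.  The
   decls and priorities here are hypothetical.  */
#if 0
  /* Assume fn_a and fn_b have DECL_INIT_PRIORITY 100 and fn_c has
     65535.  The call below then emits two 'I' functions: one invoking
     fn_a and fn_b, and one invoking fn_c.  */
  tree cdtors[3] = { fn_a, fn_b, fn_c };
  build_cdtor (/*ctor_p=*/true, cdtors, 3);
#endif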
229
230 /* Comparison function for qsort. P1 and P2 are actually of type
231 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
232 used to determine the sort order. */
233
234 static int
235 compare_ctor (const void *p1, const void *p2)
236 {
237 tree f1;
238 tree f2;
239 int priority1;
240 int priority2;
241
242 f1 = *(const tree *)p1;
243 f2 = *(const tree *)p2;
244 priority1 = DECL_INIT_PRIORITY (f1);
245 priority2 = DECL_INIT_PRIORITY (f2);
246
247 if (priority1 < priority2)
248 return -1;
249 else if (priority1 > priority2)
250 return 1;
251 else
252 /* Ensure a stable sort. */
253 return (const tree *)p1 - (const tree *)p2;
254 }
255
256 /* Comparison function for qsort. P1 and P2 are actually of type
257 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
258 used to determine the sort order. */
259
260 static int
261 compare_dtor (const void *p1, const void *p2)
262 {
263 tree f1;
264 tree f2;
265 int priority1;
266 int priority2;
267
268 f1 = *(const tree *)p1;
269 f2 = *(const tree *)p2;
270 priority1 = DECL_FINI_PRIORITY (f1);
271 priority2 = DECL_FINI_PRIORITY (f2);
272
273 if (priority1 < priority2)
274 return -1;
275 else if (priority1 > priority2)
276 return 1;
277 else
278 /* Ensure a stable sort. */
279 return (const tree *)p1 - (const tree *)p2;
280 }
281
282 /* Generate functions to call static constructors and destructors
283 for targets that do not support .ctors/.dtors sections. These
284 functions have magic names which are detected by collect2. */
285
286 static void
287 cgraph_build_cdtor_fns (void)
288 {
289 if (!VEC_empty (tree, static_ctors))
290 {
291 gcc_assert (!targetm.have_ctors_dtors);
292 qsort (VEC_address (tree, static_ctors),
293 VEC_length (tree, static_ctors),
294 sizeof (tree),
295 compare_ctor);
296 build_cdtor (/*ctor_p=*/true,
297 VEC_address (tree, static_ctors),
298 VEC_length (tree, static_ctors));
299 VEC_truncate (tree, static_ctors, 0);
300 }
301
302 if (!VEC_empty (tree, static_dtors))
303 {
304 gcc_assert (!targetm.have_ctors_dtors);
305 qsort (VEC_address (tree, static_dtors),
306 VEC_length (tree, static_dtors),
307 sizeof (tree),
308 compare_dtor);
309 build_cdtor (/*ctor_p=*/false,
310 VEC_address (tree, static_dtors),
311 VEC_length (tree, static_dtors));
312 VEC_truncate (tree, static_dtors, 0);
313 }
314 }
315
 316 /* Determine if function DECL is needed. That is, visible to something
 317 either outside this translation unit or to something magic in the system
 318 configury. */
319
320 bool
321 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
322 {
323 /* If the user told us it is used, then it must be so. */
324 if (node->local.externally_visible)
325 return true;
326
327 /* ??? If the assembler name is set by hand, it is possible to assemble
328 the name later after finalizing the function and the fact is noticed
329 in assemble_name then. This is arguably a bug. */
330 if (DECL_ASSEMBLER_NAME_SET_P (decl)
331 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
332 return true;
333
334 /* With -fkeep-inline-functions we are keeping all inline functions except
335 for extern inline ones. */
336 if (flag_keep_inline_functions
337 && DECL_DECLARED_INLINE_P (decl)
338 && !DECL_EXTERNAL (decl)
339 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
340 return true;
341
342 /* If we decided it was needed before, but at the time we didn't have
343 the body of the function available, then it's still needed. We have
344 to go back and re-check its dependencies now. */
345 if (node->needed)
346 return true;
347
348 /* Externally visible functions must be output. The exception is
349 COMDAT functions that must be output only when they are needed.
350
 351 When not optimizing, also output the static functions (see
 352 PR24561), but don't do so for always_inline functions, functions
 353 declared inline, or nested functions. These were optimized out
 354 in the original implementation and it is unclear whether we want
 355 to change the behavior here. */
356 if (((TREE_PUBLIC (decl)
357 || (!optimize && !node->local.disregard_inline_limits
358 && !DECL_DECLARED_INLINE_P (decl)
359 && !node->origin))
360 && !flag_whole_program
361 && !flag_lto
362 && !flag_whopr)
363 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 return true;
365
366 /* Constructors and destructors are reachable from the runtime by
367 some mechanism. */
368 if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
369 return true;
370
371 return false;
372 }
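
/* A hypothetical translation unit illustrating the rules above (for
   exposition only, not code in this file):  */
#if 0
static int helper (void) { return 1; }  /* not needed by itself; the body
                                           is reclaimed if it proves
                                           unreachable */
int api (void) { return helper (); }    /* TREE_PUBLIC => needed */
__attribute__ ((constructor))
static void boot (void) {}              /* static constructor => needed */
#endif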
373
 374 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
 375 functions to the callgraph so that they look like ordinary reachable
 376 functions inserted into the callgraph already at construction time. */
377
378 bool
379 cgraph_process_new_functions (void)
380 {
381 bool output = false;
382 tree fndecl;
383 struct cgraph_node *node;
384
 385 /* Note that this queue may grow as it is being processed, as the new
 386 functions may generate new ones. */
387 while (cgraph_new_nodes)
388 {
389 node = cgraph_new_nodes;
390 fndecl = node->decl;
391 cgraph_new_nodes = cgraph_new_nodes->next_needed;
392 switch (cgraph_state)
393 {
394 case CGRAPH_STATE_CONSTRUCTION:
 395 /* At construction time we just need to finalize the function and move
 396 it onto the reachable functions list. */
397
398 node->next_needed = NULL;
399 cgraph_finalize_function (fndecl, false);
400 cgraph_mark_reachable_node (node);
401 output = true;
402 break;
403
404 case CGRAPH_STATE_IPA:
405 case CGRAPH_STATE_IPA_SSA:
 406 /* When IPA optimization has already started, do all essential
 407 transformations that have already been performed on the whole
 408 cgraph but not on this function. */
409
410 gimple_register_cfg_hooks ();
411 if (!node->analyzed)
412 cgraph_analyze_function (node);
413 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
414 current_function_decl = fndecl;
415 compute_inline_parameters (node);
416 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
417 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
418 /* When not optimizing, be sure we run early local passes anyway
419 to expand OMP. */
420 || !optimize)
421 execute_pass_list (pass_early_local_passes.pass.sub);
422 free_dominance_info (CDI_POST_DOMINATORS);
423 free_dominance_info (CDI_DOMINATORS);
424 pop_cfun ();
425 current_function_decl = NULL;
426 break;
427
428 case CGRAPH_STATE_EXPANSION:
429 /* Functions created during expansion shall be compiled
430 directly. */
431 node->process = 0;
432 cgraph_expand_function (node);
433 break;
434
435 default:
436 gcc_unreachable ();
437 break;
438 }
439 cgraph_call_function_insertion_hooks (node);
440 }
441 return output;
442 }
443
 444 /* As a GCC extension we allow redefinition of the function. The
 445 semantics when the two bodies differ is not well defined.
 446 We replace the old body with the new body, so in unit-at-a-time mode
 447 we always use the new body, while in normal mode we may end up with
 448 the old body inlined into some functions and the new body expanded and
 449 inlined in others.
 450
 451 ??? It may make more sense to use one body for inlining and the other
 452 body for expanding the function, but this is difficult to do. */
453
454 static void
455 cgraph_reset_node (struct cgraph_node *node)
456 {
457 /* If node->process is set, then we have already begun whole-unit analysis.
458 This is *not* testing for whether we've already emitted the function.
459 That case can be sort-of legitimately seen with real function redefinition
460 errors. I would argue that the front end should never present us with
461 such a case, but don't enforce that for now. */
462 gcc_assert (!node->process);
463
464 /* Reset our data structures so we can analyze the function again. */
465 memset (&node->local, 0, sizeof (node->local));
466 memset (&node->global, 0, sizeof (node->global));
467 memset (&node->rtl, 0, sizeof (node->rtl));
468 node->analyzed = false;
469 node->local.redefined_extern_inline = true;
470 node->local.finalized = false;
471
472 cgraph_node_remove_callees (node);
473
 474 /* We may need to re-queue the node for assembling in case
 475 we already processed it and ignored it as not needed, or got
 476 a re-declaration in IMA mode. */
477 if (node->reachable)
478 {
479 struct cgraph_node *n;
480
481 for (n = cgraph_nodes_queue; n; n = n->next_needed)
482 if (n == node)
483 break;
484 if (!n)
485 node->reachable = 0;
486 }
487 }
488
489 static void
490 cgraph_lower_function (struct cgraph_node *node)
491 {
492 if (node->lowered)
493 return;
494
495 if (node->nested)
496 lower_nested_functions (node->decl);
497 gcc_assert (!node->nested);
498
499 tree_lowering_passes (node->decl);
500 node->lowered = true;
501 }
502
503 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
504 logic in effect. If NESTED is true, then our caller cannot stand to have
505 the garbage collector run at the moment. We would need to either create
506 a new GC context, or just not compile right now. */
507
508 void
509 cgraph_finalize_function (tree decl, bool nested)
510 {
511 struct cgraph_node *node = cgraph_node (decl);
512
513 if (node->local.finalized)
514 cgraph_reset_node (node);
515
516 node->pid = cgraph_max_pid ++;
517 notice_global_symbol (decl);
518 node->local.finalized = true;
519 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
520 node->finalized_by_frontend = true;
521 record_cdtor_fn (node->decl);
522
523 if (cgraph_decide_is_function_needed (node, decl))
524 cgraph_mark_needed_node (node);
525
526 /* Since we reclaim unreachable nodes at the end of every language
527 level unit, we need to be conservative about possible entry points
528 there. */
529 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
530 cgraph_mark_reachable_node (node);
531
532 /* If we've not yet emitted decl, tell the debug info about it. */
533 if (!TREE_ASM_WRITTEN (decl))
534 (*debug_hooks->deferred_inline_function) (decl);
535
536 /* Possibly warn about unused parameters. */
537 if (warn_unused_parameter)
538 do_warn_unused_parameter (decl);
539
540 if (!nested)
541 ggc_collect ();
542 }
543
 544 /* The C99 extern inline keyword allows changing a declaration after the
 545 function has been finalized. We then need to re-decide whether we want to
 546 mark the function as needed. */
547
548 void
549 cgraph_mark_if_needed (tree decl)
550 {
551 struct cgraph_node *node = cgraph_node (decl);
552 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
553 cgraph_mark_needed_node (node);
554 }
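
/* Hypothetical C99 input showing why the re-check above is needed:  */
#if 0
inline int f (void) { return 0; }  /* an inline definition alone does not
                                      provide an external definition */
extern int f (void);               /* this later declaration does, so the
                                      needed-ness must be re-decided */
#endif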
555
556 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
557 static bool
558 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
559 {
560 while (node != node2 && node2)
561 node2 = node2->clone_of;
562 return node2 != NULL;
563 }
564
 565 /* Verify the consistency of cgraph node NODE. */
566 void
567 verify_cgraph_node (struct cgraph_node *node)
568 {
569 struct cgraph_edge *e;
570 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
571 struct function *saved_cfun = cfun;
572 basic_block this_block;
573 gimple_stmt_iterator gsi;
574 bool error_found = false;
575
576 if (errorcount || sorrycount)
577 return;
578
579 timevar_push (TV_CGRAPH_VERIFY);
580 /* debug_generic_stmt needs correct cfun */
581 set_cfun (this_cfun);
582 for (e = node->callees; e; e = e->next_callee)
583 if (e->aux)
584 {
585 error ("aux field set for edge %s->%s",
586 identifier_to_locale (cgraph_node_name (e->caller)),
587 identifier_to_locale (cgraph_node_name (e->callee)));
588 error_found = true;
589 }
590 if (node->count < 0)
591 {
592 error ("Execution count is negative");
593 error_found = true;
594 }
595 if (node->global.inlined_to && node->local.externally_visible)
596 {
597 error ("Externally visible inline clone");
598 error_found = true;
599 }
600 if (node->global.inlined_to && node->address_taken)
601 {
602 error ("Inline clone with address taken");
603 error_found = true;
604 }
605 if (node->global.inlined_to && node->needed)
606 {
607 error ("Inline clone is needed");
608 error_found = true;
609 }
610 for (e = node->callers; e; e = e->next_caller)
611 {
612 if (e->count < 0)
613 {
614 error ("caller edge count is negative");
615 error_found = true;
616 }
617 if (e->frequency < 0)
618 {
619 error ("caller edge frequency is negative");
620 error_found = true;
621 }
622 if (e->frequency > CGRAPH_FREQ_MAX)
623 {
624 error ("caller edge frequency is too large");
625 error_found = true;
626 }
627 if (gimple_has_body_p (e->caller->decl)
628 && !e->caller->global.inlined_to
629 && (e->frequency
630 != compute_call_stmt_bb_frequency (e->caller->decl,
631 gimple_bb (e->call_stmt))))
632 {
633 error ("caller edge frequency %i does not match BB freqency %i",
634 e->frequency,
635 compute_call_stmt_bb_frequency (e->caller->decl,
636 gimple_bb (e->call_stmt)));
637 error_found = true;
638 }
639 if (!e->inline_failed)
640 {
641 if (node->global.inlined_to
642 != (e->caller->global.inlined_to
643 ? e->caller->global.inlined_to : e->caller))
644 {
645 error ("inlined_to pointer is wrong");
646 error_found = true;
647 }
648 if (node->callers->next_caller)
649 {
650 error ("multiple inline callers");
651 error_found = true;
652 }
653 }
654 else
655 if (node->global.inlined_to)
656 {
657 error ("inlined_to pointer set for noninline callers");
658 error_found = true;
659 }
660 }
661 if (!node->callers && node->global.inlined_to)
662 {
663 error ("inlined_to pointer is set but no predecessors found");
664 error_found = true;
665 }
666 if (node->global.inlined_to == node)
667 {
668 error ("inlined_to pointer refers to itself");
669 error_found = true;
670 }
671
672 if (!cgraph_node (node->decl))
673 {
674 error ("node not found in cgraph_hash");
675 error_found = true;
676 }
677
678 if (node->clone_of)
679 {
680 struct cgraph_node *n;
681 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
682 if (n == node)
683 break;
684 if (!n)
685 {
686 error ("node has wrong clone_of");
687 error_found = true;
688 }
689 }
690 if (node->clones)
691 {
692 struct cgraph_node *n;
693 for (n = node->clones; n; n = n->next_sibling_clone)
694 if (n->clone_of != node)
695 break;
696 if (n)
697 {
698 error ("node has wrong clone list");
699 error_found = true;
700 }
701 }
702 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
703 {
704 error ("node is in clone list but it is not clone");
705 error_found = true;
706 }
707 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
708 {
709 error ("node has wrong prev_clone pointer");
710 error_found = true;
711 }
712 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
713 {
714 error ("double linked list of clones corrupted");
715 error_found = true;
716 }
717
718 if (node->analyzed && gimple_has_body_p (node->decl)
719 && !TREE_ASM_WRITTEN (node->decl)
720 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
721 && !flag_wpa)
722 {
723 if (this_cfun->cfg)
724 {
725 /* The nodes we're interested in are never shared, so walk
726 the tree ignoring duplicates. */
727 struct pointer_set_t *visited_nodes = pointer_set_create ();
728 /* Reach the trees by walking over the CFG, and note the
729 enclosing basic-blocks in the call edges. */
730 FOR_EACH_BB_FN (this_block, this_cfun)
731 for (gsi = gsi_start_bb (this_block);
732 !gsi_end_p (gsi);
733 gsi_next (&gsi))
734 {
735 gimple stmt = gsi_stmt (gsi);
736 tree decl;
737 if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
738 {
739 struct cgraph_edge *e = cgraph_edge (node, stmt);
740 if (e)
741 {
742 if (e->aux)
743 {
744 error ("shared call_stmt:");
745 debug_gimple_stmt (stmt);
746 error_found = true;
747 }
748 if (e->callee->same_body_alias)
749 {
750 error ("edge points to same body alias:");
751 debug_tree (e->callee->decl);
752 error_found = true;
753 }
754 else if (!node->global.inlined_to
755 && !e->callee->global.inlined_to
756 && !clone_of_p (cgraph_node (decl), e->callee))
757 {
758 error ("edge points to wrong declaration:");
759 debug_tree (e->callee->decl);
760 fprintf (stderr," Instead of:");
761 debug_tree (decl);
762 error_found = true;
763 }
764 e->aux = (void *)1;
765 }
766 else
767 {
768 error ("missing callgraph edge for call stmt:");
769 debug_gimple_stmt (stmt);
770 error_found = true;
771 }
772 }
773 }
774 pointer_set_destroy (visited_nodes);
775 }
776 else
777 /* No CFG available?! */
778 gcc_unreachable ();
779
780 for (e = node->callees; e; e = e->next_callee)
781 {
782 if (!e->aux && !e->indirect_call)
783 {
784 error ("edge %s->%s has no corresponding call_stmt",
785 identifier_to_locale (cgraph_node_name (e->caller)),
786 identifier_to_locale (cgraph_node_name (e->callee)));
787 debug_gimple_stmt (e->call_stmt);
788 error_found = true;
789 }
790 e->aux = 0;
791 }
792 }
793 if (error_found)
794 {
795 dump_cgraph_node (stderr, node);
796 internal_error ("verify_cgraph_node failed");
797 }
798 set_cfun (saved_cfun);
799 timevar_pop (TV_CGRAPH_VERIFY);
800 }
801
802 /* Verify whole cgraph structure. */
803 void
804 verify_cgraph (void)
805 {
806 struct cgraph_node *node;
807
808 if (sorrycount || errorcount)
809 return;
810
811 for (node = cgraph_nodes; node; node = node->next)
812 verify_cgraph_node (node);
813 }
814
815 /* Output all asm statements we have stored up to be output. */
816
817 static void
818 cgraph_output_pending_asms (void)
819 {
820 struct cgraph_asm_node *can;
821
822 if (errorcount || sorrycount)
823 return;
824
825 for (can = cgraph_asm_nodes; can; can = can->next)
826 assemble_asm (can->asm_str);
827 cgraph_asm_nodes = NULL;
828 }
829
830 /* Analyze the function scheduled to be output. */
831 static void
832 cgraph_analyze_function (struct cgraph_node *node)
833 {
834 tree save = current_function_decl;
835 tree decl = node->decl;
836
837 current_function_decl = decl;
838 push_cfun (DECL_STRUCT_FUNCTION (decl));
839
840 assign_assembler_name_if_neeeded (node->decl);
841
 842 /* Make sure to gimplify bodies only once. While analyzing a
 843 function we lower it, which will require gimplified nested
 844 functions, so we can end up here with an already gimplified
 845 body. */
846 if (!gimple_body (decl))
847 gimplify_function_tree (decl);
848 dump_function (TDI_generic, decl);
849
850 cgraph_lower_function (node);
851 node->analyzed = true;
852
853 pop_cfun ();
854 current_function_decl = save;
855 }
856
857 /* Look for externally_visible and used attributes and mark cgraph nodes
858 accordingly.
859
860 We cannot mark the nodes at the point the attributes are processed (in
861 handle_*_attribute) because the copy of the declarations available at that
862 point may not be canonical. For example, in:
863
864 void f();
865 void f() __attribute__((used));
866
867 the declaration we see in handle_used_attribute will be the second
868 declaration -- but the front end will subsequently merge that declaration
869 with the original declaration and discard the second declaration.
870
871 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
872
873 void f() {}
874 void f() __attribute__((externally_visible));
875
876 is valid.
877
878 So, we walk the nodes at the end of the translation unit, applying the
879 attributes at that point. */
880
881 static void
882 process_function_and_variable_attributes (struct cgraph_node *first,
883 struct varpool_node *first_var)
884 {
885 struct cgraph_node *node;
886 struct varpool_node *vnode;
887
888 for (node = cgraph_nodes; node != first; node = node->next)
889 {
890 tree decl = node->decl;
891 if (DECL_PRESERVE_P (decl))
892 {
893 mark_decl_referenced (decl);
894 if (node->local.finalized)
895 cgraph_mark_needed_node (node);
896 }
897 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
898 {
899 if (! TREE_PUBLIC (node->decl))
900 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
901 "%<externally_visible%>"
 902 " attribute has effect only on public objects");
903 else if (node->local.finalized)
904 cgraph_mark_needed_node (node);
905 }
906 }
907 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
908 {
909 tree decl = vnode->decl;
910 if (DECL_PRESERVE_P (decl))
911 {
912 mark_decl_referenced (decl);
913 vnode->force_output = true;
914 if (vnode->finalized)
915 varpool_mark_needed_node (vnode);
916 }
917 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
918 {
919 if (! TREE_PUBLIC (vnode->decl))
920 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
921 "%<externally_visible%>"
 922 " attribute has effect only on public objects");
923 else if (vnode->finalized)
924 varpool_mark_needed_node (vnode);
925 }
926 }
927 }
928
 929 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
 930 transitively each reachable function) and build the cgraph.
 931 The function can be called multiple times after inserting new nodes into
 932 the beginning of the queue; just the new part of the queue is re-scanned then. */
933
934 static void
935 cgraph_analyze_functions (void)
936 {
937 /* Keep track of already processed nodes when called multiple times for
938 intermodule optimization. */
939 static struct cgraph_node *first_analyzed;
940 struct cgraph_node *first_processed = first_analyzed;
941 static struct varpool_node *first_analyzed_var;
942 struct cgraph_node *node, *next;
943
944 process_function_and_variable_attributes (first_processed,
945 first_analyzed_var);
946 first_processed = cgraph_nodes;
947 first_analyzed_var = varpool_nodes;
948 varpool_analyze_pending_decls ();
949 if (cgraph_dump_file)
950 {
951 fprintf (cgraph_dump_file, "Initial entry points:");
952 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
953 if (node->needed)
954 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
955 fprintf (cgraph_dump_file, "\n");
956 }
957 cgraph_process_new_functions ();
958
959 /* Propagate reachability flag and lower representation of all reachable
960 functions. In the future, lowering will introduce new functions and
961 new entry points on the way (by template instantiation and virtual
962 method table generation for instance). */
963 while (cgraph_nodes_queue)
964 {
965 struct cgraph_edge *edge;
966 tree decl = cgraph_nodes_queue->decl;
967
968 node = cgraph_nodes_queue;
969 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
970 node->next_needed = NULL;
971
 972 /* ??? It is possible to create an extern inline function and later use the
 973 weak alias attribute to kill its body. See
 974 gcc.c-torture/compile/20011119-1.c */
975 if (!DECL_STRUCT_FUNCTION (decl))
976 {
977 cgraph_reset_node (node);
978 continue;
979 }
980
981 if (!node->analyzed)
982 cgraph_analyze_function (node);
983
984 for (edge = node->callees; edge; edge = edge->next_callee)
985 if (!edge->callee->reachable)
986 cgraph_mark_reachable_node (edge->callee);
987
988 if (node->same_comdat_group)
989 {
990 for (next = node->same_comdat_group;
991 next != node;
992 next = next->same_comdat_group)
993 cgraph_mark_reachable_node (next);
994 }
995
 996 /* If decl is a clone of an abstract function, mark that abstract
 997 function so that we don't release its body. The DECL_INITIAL() of that
 998 abstract function declaration will later be needed to output debug info. */
999 if (DECL_ABSTRACT_ORIGIN (decl))
1000 {
1001 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
1002 origin_node->abstract_and_needed = true;
1003 }
1004
 1005 /* We finalize local static variables while constructing callgraph
 1006 edges. Process their attributes too. */
1007 process_function_and_variable_attributes (first_processed,
1008 first_analyzed_var);
1009 first_processed = cgraph_nodes;
1010 first_analyzed_var = varpool_nodes;
1011 varpool_analyze_pending_decls ();
1012 cgraph_process_new_functions ();
1013 }
1014
1015 /* Collect entry points to the unit. */
1016 if (cgraph_dump_file)
1017 {
1018 fprintf (cgraph_dump_file, "Unit entry points:");
1019 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1020 if (node->needed)
1021 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1022 fprintf (cgraph_dump_file, "\n\nInitial ");
1023 dump_cgraph (cgraph_dump_file);
1024 }
1025
1026 if (cgraph_dump_file)
1027 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1028
1029 for (node = cgraph_nodes; node != first_analyzed; node = next)
1030 {
1031 tree decl = node->decl;
1032 next = node->next;
1033
1034 if (node->local.finalized && !gimple_has_body_p (decl))
1035 cgraph_reset_node (node);
1036
1037 if (!node->reachable && gimple_has_body_p (decl))
1038 {
1039 if (cgraph_dump_file)
1040 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1041 cgraph_remove_node (node);
1042 continue;
1043 }
1044 else
1045 node->next_needed = NULL;
1046 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1047 gcc_assert (node->analyzed == node->local.finalized);
1048 }
1049 if (cgraph_dump_file)
1050 {
1051 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1052 dump_cgraph (cgraph_dump_file);
1053 }
1054 first_analyzed = cgraph_nodes;
1055 ggc_collect ();
1056 }
1057
1058
1059 /* Analyze the whole compilation unit once it is parsed completely. */
1060
1061 void
1062 cgraph_finalize_compilation_unit (void)
1063 {
1064 timevar_push (TV_CGRAPH);
1065
 1066 /* Do not skip analyzing the functions if there were errors; we would
 1067 miss diagnostics for the following functions otherwise. */
1068
1069 /* Emit size functions we didn't inline. */
1070 finalize_size_functions ();
1071
1072 /* Call functions declared with the "constructor" or "destructor"
1073 attribute. */
1074 cgraph_build_cdtor_fns ();
1075
1076 /* Mark alias targets necessary and emit diagnostics. */
1077 finish_aliases_1 ();
1078
1079 if (!quiet_flag)
1080 {
1081 fprintf (stderr, "\nAnalyzing compilation unit\n");
1082 fflush (stderr);
1083 }
1084
1085 /* Gimplify and lower all functions, compute reachability and
1086 remove unreachable nodes. */
1087 cgraph_analyze_functions ();
1088
1089 /* Mark alias targets necessary and emit diagnostics. */
1090 finish_aliases_1 ();
1091
1092 /* Gimplify and lower thunks. */
1093 cgraph_analyze_functions ();
1094
1095 /* Finally drive the pass manager. */
1096 cgraph_optimize ();
1097
1098 timevar_pop (TV_CGRAPH);
1099 }
1100
1101
1102 /* Figure out what functions we want to assemble. */
1103
1104 static void
1105 cgraph_mark_functions_to_output (void)
1106 {
1107 struct cgraph_node *node;
1108 #ifdef ENABLE_CHECKING
1109 bool check_same_comdat_groups = false;
1110
1111 for (node = cgraph_nodes; node; node = node->next)
1112 gcc_assert (!node->process);
1113 #endif
1114
1115 for (node = cgraph_nodes; node; node = node->next)
1116 {
1117 tree decl = node->decl;
1118 struct cgraph_edge *e;
1119
1120 gcc_assert (!node->process || node->same_comdat_group);
1121 if (node->process)
1122 continue;
1123
1124 for (e = node->callers; e; e = e->next_caller)
1125 if (e->inline_failed)
1126 break;
1127
1128 /* We need to output all local functions that are used and not
1129 always inlined, as well as those that are reachable from
1130 outside the current compilation unit. */
1131 if (node->analyzed
1132 && !node->global.inlined_to
1133 && (node->needed || node->reachable_from_other_partition
1134 || (e && node->reachable))
1135 && !TREE_ASM_WRITTEN (decl)
1136 && !DECL_EXTERNAL (decl))
1137 {
1138 node->process = 1;
1139 if (node->same_comdat_group)
1140 {
1141 struct cgraph_node *next;
1142 for (next = node->same_comdat_group;
1143 next != node;
1144 next = next->same_comdat_group)
1145 next->process = 1;
1146 }
1147 }
1148 else if (node->same_comdat_group)
1149 {
1150 #ifdef ENABLE_CHECKING
1151 check_same_comdat_groups = true;
1152 #endif
1153 }
1154 else
1155 {
1156 /* We should've reclaimed all functions that are not needed. */
1157 #ifdef ENABLE_CHECKING
1158 if (!node->global.inlined_to
1159 && gimple_has_body_p (decl)
 1160 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
 1161 inline copies are inside it, we can end up not removing the body since we no
 1162 longer have an analyzed node pointing to it. */
1163 && !node->in_other_partition
1164 && !DECL_EXTERNAL (decl))
1165 {
1166 dump_cgraph_node (stderr, node);
1167 internal_error ("failed to reclaim unneeded function");
1168 }
1169 #endif
1170 gcc_assert (node->global.inlined_to
1171 || !gimple_has_body_p (decl)
1172 || node->in_other_partition
1173 || DECL_EXTERNAL (decl));
1174
1175 }
1176
1177 }
1178 #ifdef ENABLE_CHECKING
1179 if (check_same_comdat_groups)
1180 for (node = cgraph_nodes; node; node = node->next)
1181 if (node->same_comdat_group && !node->process)
1182 {
1183 tree decl = node->decl;
1184 if (!node->global.inlined_to
1185 && gimple_has_body_p (decl)
 1186 /* FIXME: in an ltrans unit, when the offline copy is outside the partition but
 1187 inline copies are inside it, we can end up not removing the body since we no
 1188 longer have an analyzed node pointing to it. */
1189 && !node->in_other_partition
1190 && !DECL_EXTERNAL (decl))
1191 {
1192 dump_cgraph_node (stderr, node);
1193 internal_error ("failed to reclaim unneeded function");
1194 }
1195 }
1196 #endif
1197 }
1198
 1199 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
 1200 in lowered GIMPLE form.
 1201
 1202 Set current_function_decl and cfun to the newly constructed empty function
 1203 body. Return the basic block in the function body. */
1204
1205 static basic_block
1206 init_lowered_empty_function (tree decl)
1207 {
1208 basic_block bb;
1209
1210 current_function_decl = decl;
1211 allocate_struct_function (decl, false);
1212 gimple_register_cfg_hooks ();
1213 init_empty_tree_cfg ();
1214 init_tree_ssa (cfun);
1215 init_ssa_operands ();
1216 cfun->gimple_df->in_ssa_p = true;
1217 DECL_INITIAL (decl) = make_node (BLOCK);
1218
1219 DECL_SAVED_TREE (decl) = error_mark_node;
1220 cfun->curr_properties |=
1221 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1222 PROP_ssa);
1223
1224 /* Create BB for body of the function and connect it properly. */
1225 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1226 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1227 make_edge (bb, EXIT_BLOCK_PTR, 0);
1228
1229 return bb;
1230 }
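
/* Sketch of the CFG this leaves behind (for exposition only):

     ENTRY -> bb -> EXIT

   with BB empty, and the function already marked as being in SSA form,
   so callers such as assemble_thunk below can insert statements
   directly.  */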
1231
1232 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1233 offset indicated by VIRTUAL_OFFSET, if that is
1234 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1235 zero for a result adjusting thunk. */
1236
1237 static tree
1238 thunk_adjust (gimple_stmt_iterator * bsi,
1239 tree ptr, bool this_adjusting,
1240 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1241 {
1242 gimple stmt;
1243 tree ret;
1244
1245 if (this_adjusting
1246 && fixed_offset != 0)
1247 {
1248 stmt = gimple_build_assign (ptr,
1249 fold_build2_loc (input_location,
1250 POINTER_PLUS_EXPR,
1251 TREE_TYPE (ptr), ptr,
1252 size_int (fixed_offset)));
1253 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1254 }
1255
1256 /* If there's a virtual offset, look up that value in the vtable and
1257 adjust the pointer again. */
1258 if (virtual_offset)
1259 {
1260 tree vtabletmp;
1261 tree vtabletmp2;
1262 tree vtabletmp3;
1263 tree offsettmp;
1264
1265 if (!vtable_entry_type)
1266 {
1267 tree vfunc_type = make_node (FUNCTION_TYPE);
1268 TREE_TYPE (vfunc_type) = integer_type_node;
1269 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1270 layout_type (vfunc_type);
1271
1272 vtable_entry_type = build_pointer_type (vfunc_type);
1273 }
1274
1275 vtabletmp =
1276 create_tmp_var (build_pointer_type
1277 (build_pointer_type (vtable_entry_type)), "vptr");
1278
1279 /* The vptr is always at offset zero in the object. */
1280 stmt = gimple_build_assign (vtabletmp,
1281 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1282 ptr));
1283 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1284 mark_symbols_for_renaming (stmt);
1285 find_referenced_vars_in (stmt);
1286
1287 /* Form the vtable address. */
1288 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1289 "vtableaddr");
1290 stmt = gimple_build_assign (vtabletmp2,
1291 build1 (INDIRECT_REF,
1292 TREE_TYPE (vtabletmp2), vtabletmp));
1293 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1294 mark_symbols_for_renaming (stmt);
1295 find_referenced_vars_in (stmt);
1296
1297 /* Find the entry with the vcall offset. */
1298 stmt = gimple_build_assign (vtabletmp2,
1299 fold_build2_loc (input_location,
1300 POINTER_PLUS_EXPR,
1301 TREE_TYPE (vtabletmp2),
1302 vtabletmp2,
1303 fold_convert (sizetype,
1304 virtual_offset)));
1305 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1306
1307 /* Get the offset itself. */
1308 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1309 "vcalloffset");
1310 stmt = gimple_build_assign (vtabletmp3,
1311 build1 (INDIRECT_REF,
1312 TREE_TYPE (vtabletmp3),
1313 vtabletmp2));
1314 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1315 mark_symbols_for_renaming (stmt);
1316 find_referenced_vars_in (stmt);
1317
1318 /* Cast to sizetype. */
1319 offsettmp = create_tmp_var (sizetype, "offset");
1320 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1321 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1322 mark_symbols_for_renaming (stmt);
1323 find_referenced_vars_in (stmt);
1324
1325 /* Adjust the `this' pointer. */
1326 ptr = fold_build2_loc (input_location,
1327 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1328 offsettmp);
1329 }
1330
1331 if (!this_adjusting
1332 && fixed_offset != 0)
1333 /* Adjust the pointer by the constant. */
1334 {
1335 tree ptrtmp;
1336
1337 if (TREE_CODE (ptr) == VAR_DECL)
1338 ptrtmp = ptr;
1339 else
1340 {
1341 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1342 stmt = gimple_build_assign (ptrtmp, ptr);
1343 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1344 mark_symbols_for_renaming (stmt);
1345 find_referenced_vars_in (stmt);
1346 }
1347 ptr = fold_build2_loc (input_location,
1348 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1349 size_int (fixed_offset));
1350 }
1351
1352 /* Emit the statement and gimplify the adjustment expression. */
1353 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1354 stmt = gimple_build_assign (ret, ptr);
1355 mark_symbols_for_renaming (stmt);
1356 find_referenced_vars_in (stmt);
1357 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1358
1359 return ret;
1360 }
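
/* Hypothetical C rendition of the GIMPLE built above for the
   this-adjusting case with both offsets (FIXED_OFFSET and
   VIRTUAL_OFFSET stand in for the actual constants):  */
#if 0
static char *
adjust_this_sketch (char *ptr)
{
  ptr += FIXED_OFFSET;                  /* constant adjustment first */
  char **vtable = *(char ***) ptr;      /* the vptr is at offset zero */
  ptrdiff_t vcall_offset
    = *(ptrdiff_t *) ((char *) vtable + VIRTUAL_OFFSET);
  return ptr + vcall_offset;            /* vtable-supplied adjustment */
}
#endif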
1361
1362 /* Produce assembler for thunk NODE. */
1363
1364 static void
1365 assemble_thunk (struct cgraph_node *node)
1366 {
1367 bool this_adjusting = node->thunk.this_adjusting;
1368 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1369 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1370 tree virtual_offset = NULL;
1371 tree alias = node->thunk.alias;
1372 tree thunk_fndecl = node->decl;
1373 tree a = DECL_ARGUMENTS (thunk_fndecl);
1374
1375 current_function_decl = thunk_fndecl;
1376
1377 if (this_adjusting
1378 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1379 virtual_value, alias))
1380 {
1381 const char *fnname;
1382 tree fn_block;
1383
1384 DECL_RESULT (thunk_fndecl)
1385 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1386 RESULT_DECL, 0, integer_type_node);
1387 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1388
1389 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1390 create one. */
1391 fn_block = make_node (BLOCK);
1392 BLOCK_VARS (fn_block) = a;
1393 DECL_INITIAL (thunk_fndecl) = fn_block;
1394 init_function_start (thunk_fndecl);
1395 cfun->is_thunk = 1;
1396 assemble_start_function (thunk_fndecl, fnname);
1397
1398 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1399 fixed_offset, virtual_value, alias);
1400
1401 assemble_end_function (thunk_fndecl, fnname);
1402 init_insn_lengths ();
1403 free_after_compilation (cfun);
1404 set_cfun (NULL);
1405 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1406 }
1407 else
1408 {
1409 tree restype;
1410 basic_block bb, then_bb, else_bb, return_bb;
1411 gimple_stmt_iterator bsi;
1412 int nargs = 0;
1413 tree arg;
1414 int i;
1415 tree resdecl;
1416 tree restmp = NULL;
1417 VEC(tree, heap) *vargs;
1418
1419 gimple call;
1420 gimple ret;
1421
1422 DECL_IGNORED_P (thunk_fndecl) = 1;
1423 bitmap_obstack_initialize (NULL);
1424
1425 if (node->thunk.virtual_offset_p)
1426 virtual_offset = size_int (virtual_value);
1427
1428 /* Build the return declaration for the function. */
1429 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1430 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1431 {
1432 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1433 DECL_ARTIFICIAL (resdecl) = 1;
1434 DECL_IGNORED_P (resdecl) = 1;
1435 DECL_RESULT (thunk_fndecl) = resdecl;
1436 }
1437 else
1438 resdecl = DECL_RESULT (thunk_fndecl);
1439
1440 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1441
1442 bsi = gsi_start_bb (bb);
1443
1444 /* Build call to the function being thunked. */
1445 if (!VOID_TYPE_P (restype))
1446 {
1447 if (!is_gimple_reg_type (restype))
1448 {
1449 restmp = resdecl;
1450 cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
1451 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1452 }
1453 else
1454 restmp = create_tmp_var_raw (restype, "retval");
1455 }
1456
1457 for (arg = a; arg; arg = TREE_CHAIN (arg))
1458 nargs++;
1459 vargs = VEC_alloc (tree, heap, nargs);
1460 if (this_adjusting)
1461 VEC_quick_push (tree, vargs,
1462 thunk_adjust (&bsi,
1463 a, 1, fixed_offset,
1464 virtual_offset));
1465 else
1466 VEC_quick_push (tree, vargs, a);
1467 for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
1468 VEC_quick_push (tree, vargs, arg);
1469 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1470 VEC_free (tree, heap, vargs);
1471 gimple_call_set_cannot_inline (call, true);
1472 gimple_call_set_from_thunk (call, true);
1473 if (restmp)
1474 gimple_call_set_lhs (call, restmp);
1475 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1476 mark_symbols_for_renaming (call);
1477 find_referenced_vars_in (call);
1478 update_stmt (call);
1479
1480 if (restmp && !this_adjusting)
1481 {
1482 tree true_label = NULL_TREE;
1483
1484 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1485 {
1486 gimple stmt;
1487 /* If the return type is a pointer, we need to
1488 protect against NULL. We know there will be an
1489 adjustment, because that's why we're emitting a
1490 thunk. */
1491 then_bb = create_basic_block (NULL, (void *) 0, bb);
1492 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1493 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1494 remove_edge (single_succ_edge (bb));
1495 true_label = gimple_block_label (then_bb);
1496 stmt = gimple_build_cond (NE_EXPR, restmp,
1497 fold_convert (TREE_TYPE (restmp),
1498 integer_zero_node),
1499 NULL_TREE, NULL_TREE);
1500 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1501 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1502 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1503 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1504 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1505 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1506 bsi = gsi_last_bb (then_bb);
1507 }
1508
1509 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1510 fixed_offset, virtual_offset);
1511 if (true_label)
1512 {
1513 gimple stmt;
1514 bsi = gsi_last_bb (else_bb);
1515 stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
1516 integer_zero_node));
1517 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1518 bsi = gsi_last_bb (return_bb);
1519 }
1520 }
1521 else
1522 gimple_call_set_tail (call, true);
1523
1524 /* Build return value. */
1525 ret = gimple_build_return (restmp);
1526 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1527
1528 delete_unreachable_blocks ();
1529 update_ssa (TODO_update_ssa);
1530
1531 cgraph_remove_same_body_alias (node);
1532 /* Since we want to emit the thunk, we explicitly mark its name as
1533 referenced. */
1534 mark_decl_referenced (thunk_fndecl);
1535 cgraph_add_new_function (thunk_fndecl, true);
1536 bitmap_obstack_release (NULL);
1537 }
1538 current_function_decl = NULL;
1539 }
1540
1541 /* Expand function specified by NODE. */
1542
1543 static void
1544 cgraph_expand_function (struct cgraph_node *node)
1545 {
1546 tree decl = node->decl;
1547
 1548 /* We ought not to compile any inline clones. */
1549 gcc_assert (!node->global.inlined_to);
1550
1551 announce_function (decl);
1552 node->process = 0;
1553
1554 gcc_assert (node->lowered);
1555
1556 /* Generate RTL for the body of DECL. */
1557 tree_rest_of_compilation (decl);
1558
1559 /* Make sure that BE didn't give up on compiling. */
1560 gcc_assert (TREE_ASM_WRITTEN (decl));
1561 current_function_decl = NULL;
1562 if (node->same_body)
1563 {
1564 struct cgraph_node *alias, *next;
1565 bool saved_alias = node->alias;
1566 for (alias = node->same_body;
1567 alias && alias->next; alias = alias->next)
1568 ;
 1569 /* Walk aliases in the order they were created; it is possible that
 1570 thunks refer to the aliases made earlier. */
1571 for (; alias; alias = next)
1572 {
1573 next = alias->previous;
1574 if (!alias->thunk.thunk_p)
1575 assemble_alias (alias->decl,
1576 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1577 else
1578 assemble_thunk (alias);
1579 }
1580 node->alias = saved_alias;
1581 }
1582 gcc_assert (!cgraph_preserve_function_body_p (decl));
1583 cgraph_release_function_body (node);
1584 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1585 points to the dead function body. */
1586 cgraph_node_remove_callees (node);
1587
1588 cgraph_function_flags_ready = true;
1589 }
1590
 1591 /* Return true when edge E was inlined; set *REASON to the failure cause otherwise. */
1592
1593 bool
1594 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1595 {
1596 *reason = e->inline_failed;
1597 return !e->inline_failed;
1598 }
1599
1600
1601
1602 /* Expand all functions that must be output.
1603
 1604 Attempt to topologically sort the nodes so that a function is output when
 1605 all the functions it calls are already assembled, allowing data to be
1606 propagated across the callgraph. Use a stack to get smaller distance
1607 between a function and its callees (later we may choose to use a more
1608 sophisticated algorithm for function reordering; we will likely want
1609 to use subsections to make the output functions appear in top-down
1610 order). */
1611
1612 static void
1613 cgraph_expand_all_functions (void)
1614 {
1615 struct cgraph_node *node;
1616 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1617 int order_pos, new_order_pos = 0;
1618 int i;
1619
1620 order_pos = cgraph_postorder (order);
1621 gcc_assert (order_pos == cgraph_n_nodes);
1622
 1623 /* The garbage collector may remove inline clones we eliminate during
 1624 optimization, so we must be sure not to reference them. */
1625 for (i = 0; i < order_pos; i++)
1626 if (order[i]->process)
1627 order[new_order_pos++] = order[i];
1628
1629 for (i = new_order_pos - 1; i >= 0; i--)
1630 {
1631 node = order[i];
1632 if (node->process)
1633 {
1634 gcc_assert (node->reachable);
1635 node->process = 0;
1636 cgraph_expand_function (node);
1637 }
1638 }
1639 cgraph_process_new_functions ();
1640
1641 free (order);
1642
1643 }
1644
1645 /* This is used to sort the node types by the cgraph order number. */
1646
1647 enum cgraph_order_sort_kind
1648 {
1649 ORDER_UNDEFINED = 0,
1650 ORDER_FUNCTION,
1651 ORDER_VAR,
1652 ORDER_ASM
1653 };
1654
1655 struct cgraph_order_sort
1656 {
1657 enum cgraph_order_sort_kind kind;
1658 union
1659 {
1660 struct cgraph_node *f;
1661 struct varpool_node *v;
1662 struct cgraph_asm_node *a;
1663 } u;
1664 };
1665
1666 /* Output all functions, variables, and asm statements in the order
1667 according to their order fields, which is the order in which they
1668 appeared in the file. This implements -fno-toplevel-reorder. In
1669 this mode we may output functions and variables which don't really
1670 need to be output. */
1671
1672 static void
1673 cgraph_output_in_order (void)
1674 {
1675 int max;
1676 struct cgraph_order_sort *nodes;
1677 int i;
1678 struct cgraph_node *pf;
1679 struct varpool_node *pv;
1680 struct cgraph_asm_node *pa;
1681
1682 max = cgraph_order;
1683 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1684
1685 varpool_analyze_pending_decls ();
1686
1687 for (pf = cgraph_nodes; pf; pf = pf->next)
1688 {
1689 if (pf->process)
1690 {
1691 i = pf->order;
1692 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1693 nodes[i].kind = ORDER_FUNCTION;
1694 nodes[i].u.f = pf;
1695 }
1696 }
1697
1698 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1699 {
1700 i = pv->order;
1701 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1702 nodes[i].kind = ORDER_VAR;
1703 nodes[i].u.v = pv;
1704 }
1705
1706 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1707 {
1708 i = pa->order;
1709 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1710 nodes[i].kind = ORDER_ASM;
1711 nodes[i].u.a = pa;
1712 }
1713
 1714 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1715 for (i = 0; i < max; ++i)
1716 {
1717 if (nodes[i].kind == ORDER_VAR)
1718 {
1719 varpool_mark_needed_node (nodes[i].u.v);
1720 }
1721 }
1722 varpool_empty_needed_queue ();
1723
1724 for (i = 0; i < max; ++i)
1725 {
1726 switch (nodes[i].kind)
1727 {
1728 case ORDER_FUNCTION:
1729 nodes[i].u.f->process = 0;
1730 cgraph_expand_function (nodes[i].u.f);
1731 break;
1732
1733 case ORDER_VAR:
1734 varpool_assemble_decl (nodes[i].u.v);
1735 break;
1736
1737 case ORDER_ASM:
1738 assemble_asm (nodes[i].u.a->asm_str);
1739 break;
1740
1741 case ORDER_UNDEFINED:
1742 break;
1743
1744 default:
1745 gcc_unreachable ();
1746 }
1747 }
1748
1749 cgraph_asm_nodes = NULL;
1750 free (nodes);
1751 }
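
/* Hypothetical input illustrating the ordering preserved above under
   -fno-toplevel-reorder:  */
#if 0
asm ("# first");      /* order 0: ORDER_ASM */
int v = 1;            /* order 1: ORDER_VAR */
void f (void) {}      /* order 2: ORDER_FUNCTION */
/* cgraph_output_in_order emits these in exactly this sequence.  */
#endif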
1752
 1753 /* Return true when the function body of DECL still needs to be kept around
 1754 for later re-use. */
1755 bool
1756 cgraph_preserve_function_body_p (tree decl)
1757 {
1758 struct cgraph_node *node;
1759
1760 gcc_assert (cgraph_global_info_ready);
1761 /* Look if there is any clone around. */
1762 node = cgraph_node (decl);
1763 if (node->clones)
1764 return true;
1765 return false;
1766 }
1767
1768 static void
1769 ipa_passes (void)
1770 {
1771 set_cfun (NULL);
1772 current_function_decl = NULL;
1773 gimple_register_cfg_hooks ();
1774 bitmap_obstack_initialize (NULL);
1775
1776 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1777
1778 if (!in_lto_p)
1779 execute_ipa_pass_list (all_small_ipa_passes);
1780
1781 /* If pass_all_early_optimizations was not scheduled, the state of
1782 the cgraph will not be properly updated. Update it now. */
1783 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1784 cgraph_state = CGRAPH_STATE_IPA_SSA;
1785
1786 if (!in_lto_p)
1787 {
1788 /* Generate coverage variables and constructors. */
1789 coverage_finish ();
1790
1791 /* Process new functions added. */
1792 set_cfun (NULL);
1793 current_function_decl = NULL;
1794 cgraph_process_new_functions ();
1795
1796 execute_ipa_summary_passes
1797 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1798 }
1799 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1800
1801 if (!in_lto_p)
1802 ipa_write_summaries ();
1803
1804 if (!flag_ltrans)
1805 execute_ipa_pass_list (all_regular_ipa_passes);
1806 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1807
1808 bitmap_obstack_release (NULL);
1809 }
1810
1811
1812 /* Perform simple optimizations based on callgraph. */
1813
1814 void
1815 cgraph_optimize (void)
1816 {
1817 if (errorcount || sorrycount)
1818 return;
1819
1820 #ifdef ENABLE_CHECKING
1821 verify_cgraph ();
1822 #endif
1823
 1824 /* The front end may output common variables after the unit has been finalized.
 1825 It is safe to deal with them here as they are always zero-initialized. */
1826 varpool_analyze_pending_decls ();
1827
1828 timevar_push (TV_CGRAPHOPT);
1829 if (pre_ipa_mem_report)
1830 {
1831 fprintf (stderr, "Memory consumption before IPA\n");
1832 dump_memory_report (false);
1833 }
1834 if (!quiet_flag)
1835 fprintf (stderr, "Performing interprocedural optimizations\n");
1836 cgraph_state = CGRAPH_STATE_IPA;
1837
 1838 /* Don't run the IPA passes if there were any error or sorry messages. */
1839 if (errorcount == 0 && sorrycount == 0)
1840 ipa_passes ();
1841
1842 /* Do nothing else if any IPA pass found errors. */
1843 if (errorcount || sorrycount)
1844 {
1845 timevar_pop (TV_CGRAPHOPT);
1846 return;
1847 }
1848
 1849 /* This pass removes bodies of extern inline functions we never inlined.
 1850 Do it late so other IPA passes see what is really going on. */
1851 cgraph_remove_unreachable_nodes (false, dump_file);
1852 cgraph_global_info_ready = true;
1853 if (cgraph_dump_file)
1854 {
1855 fprintf (cgraph_dump_file, "Optimized ");
1856 dump_cgraph (cgraph_dump_file);
1857 dump_varpool (cgraph_dump_file);
1858 }
1859 if (post_ipa_mem_report)
1860 {
1861 fprintf (stderr, "Memory consumption after IPA\n");
1862 dump_memory_report (false);
1863 }
1864 timevar_pop (TV_CGRAPHOPT);
1865
1866 /* Output everything. */
1867 (*debug_hooks->assembly_start) ();
1868 if (!quiet_flag)
1869 fprintf (stderr, "Assembling functions:\n");
1870 #ifdef ENABLE_CHECKING
1871 verify_cgraph ();
1872 #endif
1873
1874 cgraph_materialize_all_clones ();
1875 cgraph_mark_functions_to_output ();
1876
1877 cgraph_state = CGRAPH_STATE_EXPANSION;
1878 if (!flag_toplevel_reorder)
1879 cgraph_output_in_order ();
1880 else
1881 {
1882 cgraph_output_pending_asms ();
1883
1884 cgraph_expand_all_functions ();
1885 varpool_remove_unreferenced_decls ();
1886
1887 varpool_assemble_pending_decls ();
1888 }
1889 cgraph_process_new_functions ();
1890 cgraph_state = CGRAPH_STATE_FINISHED;
1891
1892 if (cgraph_dump_file)
1893 {
1894 fprintf (cgraph_dump_file, "\nFinal ");
1895 dump_cgraph (cgraph_dump_file);
1896 }
1897 #ifdef ENABLE_CHECKING
1898 verify_cgraph ();
1899 /* Double check that all inline clones are gone and that all
1900 function bodies have been released from memory. */
1901 if (!(sorrycount || errorcount))
1902 {
1903 struct cgraph_node *node;
1904 bool error_found = false;
1905
1906 for (node = cgraph_nodes; node; node = node->next)
1907 if (node->analyzed
1908 && (node->global.inlined_to
1909 || gimple_has_body_p (node->decl)))
1910 {
1911 error_found = true;
1912 dump_cgraph_node (stderr, node);
1913 }
1914 if (error_found)
1915 internal_error ("nodes with unreleased memory found");
1916 }
1917 #endif
1918 }
1919
1920
1921 /* Generate and emit a static constructor or destructor. WHICH must
1922 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1923 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1924 initialization priority for this constructor or destructor. */
1925
1926 void
1927 cgraph_build_static_cdtor (char which, tree body, int priority)
1928 {
1929 static int counter = 0;
1930 char which_buf[16];
1931 tree decl, name, resdecl;
1932
1933 /* The priority is encoded in the constructor or destructor name.
1934 collect2 will sort the names and arrange that they are called at
1935 program startup. */
1936 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1937 name = get_file_function_name (which_buf);
1938
1939 decl = build_decl (input_location, FUNCTION_DECL, name,
1940 build_function_type (void_type_node, void_list_node));
1941 current_function_decl = decl;
1942
1943 resdecl = build_decl (input_location,
1944 RESULT_DECL, NULL_TREE, void_type_node);
1945 DECL_ARTIFICIAL (resdecl) = 1;
1946 DECL_RESULT (decl) = resdecl;
1947 DECL_CONTEXT (resdecl) = decl;
1948
1949 allocate_struct_function (decl, false);
1950
1951 TREE_STATIC (decl) = 1;
1952 TREE_USED (decl) = 1;
1953 DECL_ARTIFICIAL (decl) = 1;
1954 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1955 DECL_SAVED_TREE (decl) = body;
1956 if (!targetm.have_ctors_dtors)
1957 {
1958 TREE_PUBLIC (decl) = 1;
1959 DECL_PRESERVE_P (decl) = 1;
1960 }
1961 DECL_UNINLINABLE (decl) = 1;
1962
1963 DECL_INITIAL (decl) = make_node (BLOCK);
1964 TREE_USED (DECL_INITIAL (decl)) = 1;
1965
1966 DECL_SOURCE_LOCATION (decl) = input_location;
1967 cfun->function_end_locus = input_location;
1968
1969 switch (which)
1970 {
1971 case 'I':
1972 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1973 decl_init_priority_insert (decl, priority);
1974 break;
1975 case 'D':
1976 DECL_STATIC_DESTRUCTOR (decl) = 1;
1977 decl_fini_priority_insert (decl, priority);
1978 break;
1979 default:
1980 gcc_unreachable ();
1981 }
1982
1983 gimplify_function_tree (decl);
1984
1985 cgraph_add_new_function (decl, false);
1986 cgraph_mark_needed_node (cgraph_node (decl));
1987 set_cfun (NULL);
1988 }
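/* Illustrative sketch of using cgraph_build_static_cdtor: build a GENERIC
   statement list that calls an init function and register it as a static
   constructor.  MY_INIT_FNDECL is a hypothetical FUNCTION_DECL of type
   void(void) built elsewhere.  */
#if 0
  tree ctor_body = NULL_TREE;
  append_to_statement_list (build_call_expr (my_init_fndecl, 0), &ctor_body);
  cgraph_build_static_cdtor ('I', ctor_body, DEFAULT_INIT_PRIORITY);
#endif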
1989
1990 void
1991 init_cgraph (void)
1992 {
1993 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1994 }
1995
1996 /* The edges representing the callers of the NEW_VERSION node were
1997 fixed by cgraph_function_versioning (); now the call statements in
1998 the callers' bodies should be updated to call the NEW_VERSION. */
1999
2000 static void
2001 update_call_expr (struct cgraph_node *new_version)
2002 {
2003 struct cgraph_edge *e;
2004
2005 gcc_assert (new_version);
2006
2007 /* Update the call expr on the edges to call the new version. */
2008 for (e = new_version->callers; e; e = e->next_caller)
2009 {
2010 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2011 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2012 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2013 }
2014 }
2015
2016
2017 /* Create a new cgraph node which is the new version of the
2018 OLD_VERSION node. REDIRECT_CALLERS holds the caller
2019 edges which should be redirected to point to
2020 NEW_VERSION. All the callee edges of OLD_VERSION
2021 are cloned to the new version node. Return the new
2022 version node. */
2023
2024 static struct cgraph_node *
2025 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2026 tree new_decl,
2027 VEC(cgraph_edge_p,heap) *redirect_callers)
2028 {
2029 struct cgraph_node *new_version;
2030 struct cgraph_edge *e, *new_e;
2031 struct cgraph_edge *next_callee;
2032 unsigned i;
2033
2034 gcc_assert (old_version);
2035
2036 new_version = cgraph_node (new_decl);
2037
2038 new_version->analyzed = true;
2039 new_version->local = old_version->local;
2040 new_version->global = old_version->global;
2041 new_version->rtl = old_version->rtl;
2042 new_version->reachable = true;
2043 new_version->count = old_version->count;
2044
2045 /* Clone the old node's callees. Recursive calls are
2046 also cloned. */
2047 for (e = old_version->callees; e; e = e->next_callee)
2048 {
2049 new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
2050 e->lto_stmt_uid, 0, e->frequency,
2051 e->loop_nest, true);
2052 new_e->count = e->count;
2053 }
2054 /* Fix recursive calls.
2055 If OLD_VERSION has a recursive call after the
2056 previous edge cloning, the new version will have an edge
2057 pointing to the old version, which is wrong.
2058 Redirect it to point to the new version. */
2059 for (e = new_version->callees; e; e = next_callee)
2060 {
2061 next_callee = e->next_callee;
2062 if (e->callee == old_version)
2063 cgraph_redirect_edge_callee (e, new_version);
2064
2065 if (!next_callee)
2066 break;
2067 }
2068 for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2069 {
2070 /* Redirect calls to the old version node to point to its new
2071 version. */
2072 cgraph_redirect_edge_callee (e, new_version);
2073 }
2074
2075 return new_version;
2076 }
2077
2078 /* Perform function versioning.
2079 Function versioning includes copying of the tree and
2080 a callgraph update (creating a new cgraph node and updating
2081 its callees and callers).
2082
2083 REDIRECT_CALLERS is a vector of the caller edges to be redirected
2084 to the new version.
2085
2086 TREE_MAP is a mapping of tree nodes we want to replace with
2087 new ones (according to results of prior analysis).
2088 OLD_VERSION_NODE is the node that is versioned.
2089 It returns the new version's cgraph node.
2090 ARGS_TO_SKIP lists the arguments to be omitted from the new
2091 version. */
2092
2093 struct cgraph_node *
2094 cgraph_function_versioning (struct cgraph_node *old_version_node,
2095 VEC(cgraph_edge_p,heap) *redirect_callers,
2096 VEC (ipa_replace_map_p,gc)* tree_map,
2097 bitmap args_to_skip)
2098 {
2099 tree old_decl = old_version_node->decl;
2100 struct cgraph_node *new_version_node = NULL;
2101 tree new_decl;
2102
2103 if (!tree_versionable_function_p (old_decl))
2104 return NULL;
2105
2106 /* Make a new FUNCTION_DECL tree node for the
2107 new version. */
2108 if (!args_to_skip)
2109 new_decl = copy_node (old_decl);
2110 else
2111 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2112
2113 /* Create the new version's call-graph node
2114 and update the edges of the new node. */
2115 new_version_node =
2116 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2117 redirect_callers);
2118
2119 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2120 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
2121
2122 /* Update the new version's properties.
2123 Make the new version visible only within this translation unit. Make
2124 sure that it is not weak either.
2125 ??? We cannot use COMDAT linkage because there is no
2126 ABI support for this. */
2127 cgraph_make_decl_local (new_version_node->decl);
2128 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2129 new_version_node->local.externally_visible = 0;
2130 new_version_node->local.local = 1;
2131 new_version_node->lowered = true;
2132
2133 /* Update the call_expr on the edges to call the new version node. */
2134 update_call_expr (new_version_node);
2135
2136 cgraph_call_function_insertion_hooks (new_version_node);
2137 return new_version_node;
2138 }
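/* Illustrative sketch of calling cgraph_function_versioning: clone NODE
   while dropping its first argument, with no caller edges redirected and
   no tree replacements requested.  NODE is a hypothetical cgraph node.  */
#if 0
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  struct cgraph_node *new_node;

  bitmap_set_bit (args_to_skip, 0);
  new_node = cgraph_function_versioning (node, NULL, NULL, args_to_skip);
  BITMAP_FREE (args_to_skip);
#endif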
2139
2140 /* Produce a separate function body for inline clones so that the offline
2141 copy can be modified without affecting them. */
2142 struct cgraph_node *
2143 save_inline_function_body (struct cgraph_node *node)
2144 {
2145 struct cgraph_node *first_clone, *n;
2146
2147 gcc_assert (node == cgraph_node (node->decl));
2148
2149 cgraph_lower_function (node);
2150
2151 first_clone = node->clones;
2152
2153 first_clone->decl = copy_node (node->decl);
2154 cgraph_insert_node_to_hashtable (first_clone);
2155 gcc_assert (first_clone == cgraph_node (first_clone->decl));
2156 if (first_clone->next_sibling_clone)
2157 {
2158 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2159 n->clone_of = first_clone;
2160 n->clone_of = first_clone;
2161 n->next_sibling_clone = first_clone->clones;
2162 if (first_clone->clones)
2163 first_clone->clones->prev_sibling_clone = n;
2164 first_clone->clones = first_clone->next_sibling_clone;
2165 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2166 first_clone->next_sibling_clone = NULL;
2167 gcc_assert (!first_clone->prev_sibling_clone);
2168 }
2169 first_clone->clone_of = NULL;
2170 node->clones = NULL;
2171
2172 if (first_clone->clones)
2173 for (n = first_clone->clones; n != first_clone;)
2174 {
2175 gcc_assert (n->decl == node->decl);
2176 n->decl = first_clone->decl;
2177 if (n->clones)
2178 n = n->clones;
2179 else if (n->next_sibling_clone)
2180 n = n->next_sibling_clone;
2181 else
2182 {
2183 while (n != first_clone && !n->next_sibling_clone)
2184 n = n->clone_of;
2185 if (n != first_clone)
2186 n = n->next_sibling_clone;
2187 }
2188 }
2189
2190 /* Copy the function body of NODE to FIRST_CLONE. */
2191 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
2192
2193 DECL_EXTERNAL (first_clone->decl) = 0;
2194 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
2195 TREE_PUBLIC (first_clone->decl) = 0;
2196 DECL_COMDAT (first_clone->decl) = 0;
2197 VEC_free (ipa_opt_pass, heap,
2198 first_clone->ipa_transforms_to_apply);
2199 first_clone->ipa_transforms_to_apply = NULL;
2200
2201 #ifdef ENABLE_CHECKING
2202 verify_cgraph_node (first_clone);
2203 #endif
2204 return first_clone;
2205 }
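/* Illustrative sketch (hypothetical caller): saving a body is only
   meaningful while NODE still has inline clones; the returned clone then
   owns the preserved body, and NODE's own body can be transformed without
   affecting the clones.  */
#if 0
  if (node->clones)
    {
      struct cgraph_node *body_holder = save_inline_function_body (node);
      gcc_assert (gimple_has_body_p (body_holder->decl));
    }
#endif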
2206
2207 /* Given a virtual clone, turn it into an actual clone. */
2208 static void
2209 cgraph_materialize_clone (struct cgraph_node *node)
2210 {
2211 bitmap_obstack_initialize (NULL);
2212 /* Copy the function body of the clone's origin to the clone. */
2213 tree_function_versioning (node->clone_of->decl, node->decl,
2214 node->clone.tree_map, true,
2215 node->clone.args_to_skip);
2216 if (cgraph_dump_file)
2217 {
2218 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2219 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2220 }
2221
2222 /* The function is no longer a clone. */
2223 if (node->next_sibling_clone)
2224 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2225 if (node->prev_sibling_clone)
2226 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2227 else
2228 node->clone_of->clones = node->next_sibling_clone;
2229 node->next_sibling_clone = NULL;
2230 node->prev_sibling_clone = NULL;
2231 if (!node->clone_of->analyzed && !node->clone_of->clones)
2232 cgraph_remove_node (node->clone_of);
2233 node->clone_of = NULL;
2234 bitmap_obstack_release (NULL);
2235 }
2236
2237 /* If necessary, change the function declaration in the call statement
2238 associated with E so that it corresponds to the edge callee. */
2239
2240 gimple
2241 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2242 {
2243 tree decl = gimple_call_fndecl (e->call_stmt);
2244 gimple new_stmt;
2245 gimple_stmt_iterator gsi;
2246
2247 if (!decl || decl == e->callee->decl
2248 /* Don't update a call from a same-body alias to the real function. */
2249 || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
2250 return e->call_stmt;
2251
2252 if (cgraph_dump_file)
2253 {
2254 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2255 cgraph_node_name (e->caller), e->caller->uid,
2256 cgraph_node_name (e->callee), e->callee->uid);
2257 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2258 }
2259
2260 if (e->callee->clone.combined_args_to_skip)
2261 new_stmt = gimple_call_copy_skip_args (e->call_stmt,
2262 e->callee->clone.combined_args_to_skip);
2263 else
2264 new_stmt = e->call_stmt;
2265 if (gimple_vdef (new_stmt)
2266 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2267 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2268 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2269
2270 gsi = gsi_for_stmt (e->call_stmt);
2271 gsi_replace (&gsi, new_stmt, true);
2272 update_stmt (new_stmt);
2273
2274 /* Update EH information too, just in case. */
2275 maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
2276
2277 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2278
2279 if (cgraph_dump_file)
2280 {
2281 fprintf (cgraph_dump_file, " updated to:");
2282 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2283 }
2284 return new_stmt;
2285 }
2286
2287 /* Once all functions from the compilation unit are in memory, produce all
2288 clones and update all calls. We might also do this on demand if we don't
2289 want to bring all functions into memory prior to compilation, but the
2290 current WHOPR implementation does it this way, and it is a bit easier to
2291 keep everything right in this order. */
2292 void
2293 cgraph_materialize_all_clones (void)
2294 {
2295 struct cgraph_node *node;
2296 bool stabilized = false;
2297
2298 if (cgraph_dump_file)
2299 fprintf (cgraph_dump_file, "Materializing clones\n");
2300 #ifdef ENABLE_CHECKING
2301 verify_cgraph ();
2302 #endif
2303
2304 /* We could also use topological order, but the number of iterations is
2305 bounded by the number of IPA passes, since a single IPA pass is unlikely
2306 to create clones of clones it created itself. */
2307 while (!stabilized)
2308 {
2309 stabilized = true;
2310 for (node = cgraph_nodes; node; node = node->next)
2311 {
2312 if (node->clone_of && node->decl != node->clone_of->decl
2313 && !gimple_has_body_p (node->decl))
2314 {
2315 if (gimple_has_body_p (node->clone_of->decl))
2316 {
2317 if (cgraph_dump_file)
2318 {
2319 fprintf (cgraph_dump_file, "cloning %s to %s\n",
2320 cgraph_node_name (node->clone_of),
2321 cgraph_node_name (node));
2322 if (node->clone.tree_map)
2323 {
2324 unsigned int i;
2325 fprintf (cgraph_dump_file, " replace map: ");
2326 for (i = 0; i < VEC_length (ipa_replace_map_p,
2327 node->clone.tree_map);
2328 i++)
2329 {
2330 struct ipa_replace_map *replace_info;
2331 replace_info = VEC_index (ipa_replace_map_p,
2332 node->clone.tree_map,
2333 i);
2334 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2335 fprintf (cgraph_dump_file, " -> ");
2336 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2337 fprintf (cgraph_dump_file, "%s%s;",
2338 replace_info->replace_p ? "(replace)":"",
2339 replace_info->ref_p ? "(ref)":"");
2340 }
2341 fprintf (cgraph_dump_file, "\n");
2342 }
2343 if (node->clone.args_to_skip)
2344 {
2345 fprintf (cgraph_dump_file, " args_to_skip: ");
2346 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2347 }
2348 if (node->clone.combined_args_to_skip)
2349 {
2350 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2351 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2352 }
2353 }
2354 cgraph_materialize_clone (node);
2355 }
2356 else
2357 stabilized = false;
2358 }
2359 }
2360 }
2361 for (node = cgraph_nodes; node; node = node->next)
2362 if (!node->analyzed && node->callees)
2363 cgraph_node_remove_callees (node);
2364 if (cgraph_dump_file)
2365 fprintf (cgraph_dump_file, "Updating call sites\n");
2366 for (node = cgraph_nodes; node; node = node->next)
2367 if (node->analyzed && !node->clone_of
2368 && gimple_has_body_p (node->decl))
2369 {
2370 struct cgraph_edge *e;
2371
2372 current_function_decl = node->decl;
2373 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2374 for (e = node->callees; e; e = e->next_callee)
2375 cgraph_redirect_edge_call_stmt_to_callee (e);
2376 gcc_assert (!need_ssa_update_p (cfun));
2377 pop_cfun ();
2378 current_function_decl = NULL;
2379 #ifdef ENABLE_CHECKING
2380 verify_cgraph_node (node);
2381 #endif
2382 }
2383 if (cgraph_dump_file)
2384 fprintf (cgraph_dump_file, "Materialization and call site updates done.\n");
2385 /* All changes to parameters have been performed. In order not to
2386 incorrectly repeat them, we simply dispose of the bitmaps that drive the
2387 changes. */
2388 for (node = cgraph_nodes; node; node = node->next)
2389 node->clone.combined_args_to_skip = NULL;
2390 #ifdef ENABLE_CHECKING
2391 verify_cgraph ();
2392 #endif
2393 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2394 }
2395
2396 #include "gt-cgraphunit.h"