re PR middle-end/31490 (Compile error section type conflict)
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements the main driver of the compilation process as well
23 as a few basic interprocedural optimizers.
24
25 The main purpose of this file is to act as an interface between the
26 tree-based front ends and the back end (and middle end).
27
28 The front end is supposed to use the following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once the front end has parsed the whole function
33 body and it is certain that neither the body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 functions.)
37
38 - varpool_finalize_variable
39
40 This function has the same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once the (source level) compilation unit is
46 finalized and it will no longer change.
47
48 Here the call-graph construction and local function
49 analysis take place. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in the C front end).
54
55 - cgraph_optimize
56
57 In unit-at-a-time compilation the interprocedural analysis takes
58 place here. In particular, static functions whose address is never
59 taken are marked as local. The back end can then use this information to
60 modify calling conventions, do better inlining, or perform similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When a function or variable is referenced in some hidden way, the call-graph
66 data structure must be updated accordingly by these functions.
67 There should be little need to call them, and all the references
68 should be made explicit to the cgraph code. At present these functions are
69 used by the C++ front end to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the expression as needed.
76
77 ??? On the tree-ssa branch, genericizing should take place here and we would
78 avoid the need for these hooks (replacing them by a genericizing hook).
79
80 Analysis of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion to cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed are output to the assembly file. Via
88 the mark_referenced call in assemble_variable, functions referenced by
89 static variables are noticed too.
90
91 The interprocedural information is produced and its existence is
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change a function from !reachable to reachable, and thus
94 assemble_variable no longer calls mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined and are not external are output.
98
99 ??? It is possible that a reference to a function or variable is optimized
100 out. We cannot deal with this nicely because the topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize the code so variables are output last and only if they
105 really have been referenced by the produced code, so we catch more cases
106 where the reference has been optimized out. */
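/* Editorial sketch (not from the GCC sources): a front end's driver loop,
   built on the interface described above, conceptually does

     tree fndecl;
     while ((fndecl = hypothetical_parse_next_function ()) != NULL_TREE)
       cgraph_finalize_function (fndecl, false);
     cgraph_finalize_compilation_unit ();

   where hypothetical_parse_next_function stands in for the front end's own
   parsing loop; cgraph_optimize is then driven from within
   cgraph_finalize_compilation_unit.  */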
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
141
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node *);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node *);
147
148 FILE *cgraph_dump_file;
149
150 /* Used for vtable lookup in thunk adjusting. */
151 static GTY (()) tree vtable_entry_type;
152
153 /* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit or something magic in the system
155 configury. */
156
157 bool
158 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
159 {
160 /* If the user told us it is used, then it must be so. */
161 if (node->local.externally_visible)
162 return true;
163
164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
169 return true;
170
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
175 && !DECL_EXTERNAL (decl)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
177 return true;
178
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
182 if (node->needed)
183 return true;
184
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
187
188 When not optimizing, also output the static functions (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline, and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl)
194 || (!optimize
195 && !node->local.disregard_inline_limits
196 && !DECL_DECLARED_INLINE_P (decl)
197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
199 && !flag_whole_program
200 && !flag_lto)
201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
202 return true;
203
204 return false;
205 }
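/* Editorial example of the decision above, assuming a plain C translation
   unit compiled without -fwhole-program or -flto:

     static int helper (int x) { return x + 1; }    // not needed by itself
     int api_entry (int x) { return helper (x); }   // TREE_PUBLIC => needed

   api_entry is needed because it is externally visible; helper is not
   needed per se and only becomes reachable through the call edge from
   api_entry.  */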
206
207 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add these
208 functions to the callgraph so that they look like ordinary reachable
209 functions inserted into the callgraph at construction time. */
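/* Editorial note: new functions typically appear while others are being
   processed, e.g. bodies outlined during OMP expansion (see the !optimize
   case below) or clones materialized by IPA passes; each CGRAPH_STATE case
   below catches such a function up with whatever has already happened to
   the rest of the callgraph.  */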
210
211 bool
212 cgraph_process_new_functions (void)
213 {
214 bool output = false;
215 tree fndecl;
216 struct cgraph_node *node;
217
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as it is being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
222 {
223 node = cgraph_new_nodes;
224 fndecl = node->decl;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
226 switch (cgraph_state)
227 {
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize the function and move
230 it into the reachable functions list. */
231
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
235 output = true;
236 break;
237
238 case CGRAPH_STATE_IPA:
239 case CGRAPH_STATE_IPA_SSA:
240 /* When IPA optimization has already started, do all essential
241 transformations that have already been performed on the whole
242 cgraph but not on this function. */
243
244 gimple_register_cfg_hooks ();
245 if (!node->analyzed)
246 cgraph_analyze_function (node);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
248 current_function_decl = fndecl;
249 compute_inline_parameters (node);
250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
253 to expand OMP. */
254 || !optimize)
255 execute_pass_list (pass_early_local_passes.pass.sub);
256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
258 pop_cfun ();
259 current_function_decl = NULL;
260 break;
261
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
264 directly. */
265 node->process = 0;
266 cgraph_expand_function (node);
267 break;
268
269 default:
270 gcc_unreachable ();
271 break;
272 }
273 cgraph_call_function_insertion_hooks (node);
274 varpool_analyze_pending_decls ();
275 }
276 return output;
277 }
278
279 /* As a GCC extension we allow redefinition of the function. The
280 semantics when the two bodies differ are not well defined.
281 We replace the old body with the new body, so in unit-at-a-time mode
282 we always use the new body, while in normal mode we may end up with
283 the old body inlined into some functions and the new body expanded and
284 inlined in others.
285
286 ??? It may make more sense to use one body for inlining and the other
287 body for expanding the function, but this is difficult to do. */
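/* Editorial illustration of the extension discussed above (gnu89 mode):

     extern inline int twice (int x) { return 2 * x; }  // inline-only body
     int twice (int x) { return x + x; }                // later redefinition

   When the second definition is finalized, cgraph_reset_node below throws
   away everything gathered for the first body so analysis starts over.  */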
288
289 static void
290 cgraph_reset_node (struct cgraph_node *node)
291 {
292 /* If node->process is set, then we have already begun whole-unit analysis.
293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
297 gcc_assert (!node->process);
298
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
306
307 cgraph_node_remove_callees (node);
308
309 /* We may need to re-queue the node for assembling in case
310 we already processed it and ignored it as not needed, or got
311 a re-declaration in IMA mode. */
312 if (node->reachable)
313 {
314 struct cgraph_node *n;
315
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
317 if (n == node)
318 break;
319 if (!n)
320 node->reachable = 0;
321 }
322 }
323
324 static void
325 cgraph_lower_function (struct cgraph_node *node)
326 {
327 if (node->lowered)
328 return;
329
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
333
334 tree_lowering_passes (node->decl);
335 node->lowered = true;
336 }
337
338 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
342
343 void
344 cgraph_finalize_function (tree decl, bool nested)
345 {
346 struct cgraph_node *node = cgraph_node (decl);
347
348 if (node->local.finalized)
349 cgraph_reset_node (node);
350
351 node->pid = cgraph_max_pid ++;
352 notice_global_symbol (decl);
353 node->local.finalized = true;
354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
355 node->finalized_by_frontend = true;
356
357 if (cgraph_decide_is_function_needed (node, decl))
358 cgraph_mark_needed_node (node);
359
360 /* Since we reclaim unreachable nodes at the end of every language
361 level unit, we need to be conservative about possible entry points
362 there. */
363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
365 || DECL_STATIC_DESTRUCTOR (decl)
366 /* COMDAT virtual functions may be referenced by a vtable from
367 another compilation unit. Still, we want to devirtualize calls
368 to them, so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
372 cgraph_mark_reachable_node (node);
373
374 /* If we've not yet emitted decl, tell the debug info about it. */
375 if (!TREE_ASM_WRITTEN (decl))
376 (*debug_hooks->deferred_inline_function) (decl);
377
378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
381
382 if (!nested)
383 ggc_collect ();
384 }
385
386 /* The C99 extern inline keyword allows changing a declaration after the
387 function has been finalized. We then need to re-decide whether we want to
388 mark the function as needed. */
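/* Editorial example (C99 semantics):

     inline int sq (int x) { return x * x; }  // inline definition only
     extern int sq (int x);                   // now an external definition
                                              // of sq must be emitted

   The second declaration changes whether sq has to be output, so the front
   end calls cgraph_mark_if_needed to redo the decision.  */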
389
390 void
391 cgraph_mark_if_needed (tree decl)
392 {
393 struct cgraph_node *node = cgraph_node (decl);
394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
395 cgraph_mark_needed_node (node);
396 }
397
398 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
399 static bool
400 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
401 {
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
405 }
406
407 /* Verify edge E count and frequency. */
408
409 static bool
410 verify_edge_count_and_frequency (struct cgraph_edge *e)
411 {
412 bool error_found = false;
413 if (e->count < 0)
414 {
415 error ("caller edge count is negative");
416 error_found = true;
417 }
418 if (e->frequency < 0)
419 {
420 error ("caller edge frequency is negative");
421 error_found = true;
422 }
423 if (e->frequency > CGRAPH_FREQ_MAX)
424 {
425 error ("caller edge frequency is too large");
426 error_found = true;
427 }
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
434 error ("caller edge frequency %i does not match BB freqency %i",
435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441 }
442
443 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
444 static void
445 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
446 {
447 /* debug_gimple_stmt needs the correct cfun. */
448 if (cfun != this_cfun)
449 set_cfun (this_cfun);
450 debug_gimple_stmt (stmt);
451 }
452
453 /* Verify the consistency of the cgraph node NODE. */
454 DEBUG_FUNCTION void
455 verify_cgraph_node (struct cgraph_node *node)
456 {
457 struct cgraph_edge *e;
458 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
459 basic_block this_block;
460 gimple_stmt_iterator gsi;
461 bool error_found = false;
462
463 if (seen_error ())
464 return;
465
466 timevar_push (TV_CGRAPH_VERIFY);
467 for (e = node->callees; e; e = e->next_callee)
468 if (e->aux)
469 {
470 error ("aux field set for edge %s->%s",
471 identifier_to_locale (cgraph_node_name (e->caller)),
472 identifier_to_locale (cgraph_node_name (e->callee)));
473 error_found = true;
474 }
475 if (node->count < 0)
476 {
477 error ("execution count is negative");
478 error_found = true;
479 }
480 if (node->global.inlined_to && node->local.externally_visible)
481 {
482 error ("externally visible inline clone");
483 error_found = true;
484 }
485 if (node->global.inlined_to && node->address_taken)
486 {
487 error ("inline clone with address taken");
488 error_found = true;
489 }
490 if (node->global.inlined_to && node->needed)
491 {
492 error ("inline clone is needed");
493 error_found = true;
494 }
495 for (e = node->indirect_calls; e; e = e->next_callee)
496 {
497 if (e->aux)
498 {
499 error ("aux field set for indirect edge from %s",
500 identifier_to_locale (cgraph_node_name (e->caller)));
501 error_found = true;
502 }
503 if (!e->indirect_unknown_callee
504 || !e->indirect_info)
505 {
506 error ("An indirect edge from %s is not marked as indirect or has "
507 "associated indirect_info, the corresponding statement is: ",
508 identifier_to_locale (cgraph_node_name (e->caller)));
509 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
510 error_found = true;
511 }
512 }
513 for (e = node->callers; e; e = e->next_caller)
514 {
515 if (verify_edge_count_and_frequency (e))
516 error_found = true;
517 if (!e->inline_failed)
518 {
519 if (node->global.inlined_to
520 != (e->caller->global.inlined_to
521 ? e->caller->global.inlined_to : e->caller))
522 {
523 error ("inlined_to pointer is wrong");
524 error_found = true;
525 }
526 if (node->callers->next_caller)
527 {
528 error ("multiple inline callers");
529 error_found = true;
530 }
531 }
532 else
533 if (node->global.inlined_to)
534 {
535 error ("inlined_to pointer set for noninline callers");
536 error_found = true;
537 }
538 }
539 for (e = node->indirect_calls; e; e = e->next_callee)
540 if (verify_edge_count_and_frequency (e))
541 error_found = true;
542 if (!node->callers && node->global.inlined_to)
543 {
544 error ("inlined_to pointer is set but no predecessors found");
545 error_found = true;
546 }
547 if (node->global.inlined_to == node)
548 {
549 error ("inlined_to pointer refers to itself");
550 error_found = true;
551 }
552
553 if (!cgraph_node (node->decl))
554 {
555 error ("node not found in cgraph_hash");
556 error_found = true;
557 }
558
559 if (node->clone_of)
560 {
561 struct cgraph_node *n;
562 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
563 if (n == node)
564 break;
565 if (!n)
566 {
567 error ("node has wrong clone_of");
568 error_found = true;
569 }
570 }
571 if (node->clones)
572 {
573 struct cgraph_node *n;
574 for (n = node->clones; n; n = n->next_sibling_clone)
575 if (n->clone_of != node)
576 break;
577 if (n)
578 {
579 error ("node has wrong clone list");
580 error_found = true;
581 }
582 }
583 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
584 {
585 error ("node is in clone list but it is not clone");
586 error_found = true;
587 }
588 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
589 {
590 error ("node has wrong prev_clone pointer");
591 error_found = true;
592 }
593 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
594 {
595 error ("double linked list of clones corrupted");
596 error_found = true;
597 }
598 if (node->same_comdat_group)
599 {
600 struct cgraph_node *n = node->same_comdat_group;
601
602 if (!DECL_ONE_ONLY (node->decl))
603 {
604 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
605 error_found = true;
606 }
607 if (n == node)
608 {
609 error ("node is alone in a comdat group");
610 error_found = true;
611 }
612 do
613 {
614 if (!n->same_comdat_group)
615 {
616 error ("same_comdat_group is not a circular list");
617 error_found = true;
618 break;
619 }
620 n = n->same_comdat_group;
621 }
622 while (n != node);
623 }
624
625 if (node->analyzed && gimple_has_body_p (node->decl)
626 && !TREE_ASM_WRITTEN (node->decl)
627 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
628 && !flag_wpa)
629 {
630 if (this_cfun->cfg)
631 {
632 /* The nodes we're interested in are never shared, so walk
633 the tree ignoring duplicates. */
634 struct pointer_set_t *visited_nodes = pointer_set_create ();
635 /* Reach the trees by walking over the CFG, and note the
636 enclosing basic-blocks in the call edges. */
637 FOR_EACH_BB_FN (this_block, this_cfun)
638 for (gsi = gsi_start_bb (this_block);
639 !gsi_end_p (gsi);
640 gsi_next (&gsi))
641 {
642 gimple stmt = gsi_stmt (gsi);
643 if (is_gimple_call (stmt))
644 {
645 struct cgraph_edge *e = cgraph_edge (node, stmt);
646 tree decl = gimple_call_fndecl (stmt);
647 if (e)
648 {
649 if (e->aux)
650 {
651 error ("shared call_stmt:");
652 cgraph_debug_gimple_stmt (this_cfun, stmt);
653 error_found = true;
654 }
655 if (!e->indirect_unknown_callee)
656 {
657 struct cgraph_node *n;
658
659 if (e->callee->same_body_alias)
660 {
661 error ("edge points to same body alias:");
662 debug_tree (e->callee->decl);
663 error_found = true;
664 }
665 else if (!e->callee->global.inlined_to
666 && decl
667 && cgraph_get_node (decl)
668 && (e->callee->former_clone_of
669 != cgraph_get_node (decl)->decl)
670 && !clone_of_p (cgraph_node (decl),
671 e->callee))
672 {
673 error ("edge points to wrong declaration:");
674 debug_tree (e->callee->decl);
675 fprintf (stderr," Instead of:");
676 debug_tree (decl);
677 error_found = true;
678 }
679 else if (decl
680 && (n = cgraph_get_node_or_alias (decl))
681 && (n->same_body_alias
682 && n->thunk.thunk_p))
683 {
684 error ("a call to thunk improperly represented "
685 "in the call graph:");
686 cgraph_debug_gimple_stmt (this_cfun, stmt);
687 error_found = true;
688 }
689 }
690 else if (decl)
691 {
692 error ("an indirect edge with unknown callee "
693 "corresponding to a call_stmt with "
694 "a known declaration:");
695 error_found = true;
696 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
697 }
698 e->aux = (void *)1;
699 }
700 else if (decl)
701 {
702 error ("missing callgraph edge for call stmt:");
703 cgraph_debug_gimple_stmt (this_cfun, stmt);
704 error_found = true;
705 }
706 }
707 }
708 pointer_set_destroy (visited_nodes);
709 }
710 else
711 /* No CFG available?! */
712 gcc_unreachable ();
713
714 for (e = node->callees; e; e = e->next_callee)
715 {
716 if (!e->aux)
717 {
718 error ("edge %s->%s has no corresponding call_stmt",
719 identifier_to_locale (cgraph_node_name (e->caller)),
720 identifier_to_locale (cgraph_node_name (e->callee)));
721 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
722 error_found = true;
723 }
724 e->aux = 0;
725 }
726 for (e = node->indirect_calls; e; e = e->next_callee)
727 {
728 if (!e->aux)
729 {
730 error ("an indirect edge from %s has no corresponding call_stmt",
731 identifier_to_locale (cgraph_node_name (e->caller)));
732 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
733 error_found = true;
734 }
735 e->aux = 0;
736 }
737 }
738 if (error_found)
739 {
740 dump_cgraph_node (stderr, node);
741 internal_error ("verify_cgraph_node failed");
742 }
743 timevar_pop (TV_CGRAPH_VERIFY);
744 }
745
746 /* Verify whole cgraph structure. */
747 DEBUG_FUNCTION void
748 verify_cgraph (void)
749 {
750 struct cgraph_node *node;
751
752 if (seen_error ())
753 return;
754
755 for (node = cgraph_nodes; node; node = node->next)
756 verify_cgraph_node (node);
757 }
758
759 /* Output all asm statements we have stored up to be output. */
760
761 static void
762 cgraph_output_pending_asms (void)
763 {
764 struct cgraph_asm_node *can;
765
766 if (seen_error ())
767 return;
768
769 for (can = cgraph_asm_nodes; can; can = can->next)
770 assemble_asm (can->asm_str);
771 cgraph_asm_nodes = NULL;
772 }
773
774 /* Analyze the function scheduled to be output. */
775 static void
776 cgraph_analyze_function (struct cgraph_node *node)
777 {
778 tree save = current_function_decl;
779 tree decl = node->decl;
780
781 current_function_decl = decl;
782 push_cfun (DECL_STRUCT_FUNCTION (decl));
783
784 assign_assembler_name_if_neeeded (node->decl);
785
786 /* Make sure to gimplify bodies only once. While analyzing a
787 function we lower it, which will require gimplified nested
788 functions, so we can end up here with an already gimplified
789 body. */
790 if (!gimple_body (decl))
791 gimplify_function_tree (decl);
792 dump_function (TDI_generic, decl);
793
794 cgraph_lower_function (node);
795 node->analyzed = true;
796
797 pop_cfun ();
798 current_function_decl = save;
799 }
800
801 /* Process attributes common for vars and functions. */
802
803 static void
804 process_common_attributes (tree decl)
805 {
806 tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
807
808 if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
809 {
810 warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
811 "%<weakref%> attribute should be accompanied with"
812 " an %<alias%> attribute");
813 DECL_WEAK (decl) = 0;
814 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
815 DECL_ATTRIBUTES (decl));
816 }
817 }
818
819 /* Look for externally_visible and used attributes and mark cgraph nodes
820 accordingly.
821
822 We cannot mark the nodes at the point the attributes are processed (in
823 handle_*_attribute) because the copy of the declarations available at that
824 point may not be canonical. For example, in:
825
826 void f();
827 void f() __attribute__((used));
828
829 the declaration we see in handle_used_attribute will be the second
830 declaration -- but the front end will subsequently merge that declaration
831 with the original declaration and discard the second declaration.
832
833 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
834
835 void f() {}
836 void f() __attribute__((externally_visible));
837
838 is valid.
839
840 So, we walk the nodes at the end of the translation unit, applying the
841 attributes at that point. */
842
843 static void
844 process_function_and_variable_attributes (struct cgraph_node *first,
845 struct varpool_node *first_var)
846 {
847 struct cgraph_node *node;
848 struct varpool_node *vnode;
849
850 for (node = cgraph_nodes; node != first; node = node->next)
851 {
852 tree decl = node->decl;
853 if (DECL_PRESERVE_P (decl))
854 cgraph_mark_needed_node (node);
855 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
856 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
857 && TREE_PUBLIC (node->decl))
858 {
859 if (node->local.finalized)
860 cgraph_mark_needed_node (node);
861 }
862 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
863 {
864 if (! TREE_PUBLIC (node->decl))
865 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
866 "%<externally_visible%>"
867 " attribute have effect only on public objects");
868 else if (node->local.finalized)
869 cgraph_mark_needed_node (node);
870 }
871 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
872 && node->local.finalized)
873 {
874 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
875 "%<weakref%> attribute ignored"
876 " because function is defined");
877 DECL_WEAK (decl) = 0;
878 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
879 DECL_ATTRIBUTES (decl));
880 }
881 process_common_attributes (decl);
882 }
883 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
884 {
885 tree decl = vnode->decl;
886 if (DECL_PRESERVE_P (decl))
887 {
888 vnode->force_output = true;
889 if (vnode->finalized)
890 varpool_mark_needed_node (vnode);
891 }
892 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
893 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
894 && TREE_PUBLIC (vnode->decl))
895 {
896 if (vnode->finalized)
897 varpool_mark_needed_node (vnode);
898 }
899 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
900 {
901 if (! TREE_PUBLIC (vnode->decl))
902 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
903 "%<externally_visible%>"
904 " attribute have effect only on public objects");
905 else if (vnode->finalized)
906 varpool_mark_needed_node (vnode);
907 }
908 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
909 && vnode->finalized
910 && DECL_INITIAL (decl))
911 {
912 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
913 "%<weakref%> attribute ignored"
914 " because variable is initialized");
915 DECL_WEAK (decl) = 0;
916 DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
917 DECL_ATTRIBUTES (decl));
918 }
919 process_common_attributes (decl);
920 }
921 }
922
923 /* Process the CGRAPH_NODES_NEEDED queue, analyze each function (and
924 transitively each reachable function) and build the cgraph.
925 The function can be called multiple times after inserting new nodes into
926 the beginning of the queue; just the new part is re-scanned then. */
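/* Editorial note: this is a classic worklist fixpoint.  The queue is seeded
   with the entry points marked needed in cgraph_finalize_function; analyzing
   each body may mark new callees reachable, appending them to the queue,
   until the queue empties.  */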
927
928 static void
929 cgraph_analyze_functions (void)
930 {
931 /* Keep track of already processed nodes when called multiple times for
932 intermodule optimization. */
933 static struct cgraph_node *first_analyzed;
934 struct cgraph_node *first_processed = first_analyzed;
935 static struct varpool_node *first_analyzed_var;
936 struct cgraph_node *node, *next;
937
938 bitmap_obstack_initialize (NULL);
939 process_function_and_variable_attributes (first_processed,
940 first_analyzed_var);
941 first_processed = cgraph_nodes;
942 first_analyzed_var = varpool_nodes;
943 varpool_analyze_pending_decls ();
944 if (cgraph_dump_file)
945 {
946 fprintf (cgraph_dump_file, "Initial entry points:");
947 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
948 if (node->needed)
949 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
950 fprintf (cgraph_dump_file, "\n");
951 }
952 cgraph_process_new_functions ();
953
954 /* Propagate reachability flag and lower representation of all reachable
955 functions. In the future, lowering will introduce new functions and
956 new entry points on the way (by template instantiation and virtual
957 method table generation for instance). */
958 while (cgraph_nodes_queue)
959 {
960 struct cgraph_edge *edge;
961 tree decl = cgraph_nodes_queue->decl;
962
963 node = cgraph_nodes_queue;
964 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
965 node->next_needed = NULL;
966
967 /* ??? It is possible to create an extern inline function and later use
968 the weak alias attribute to kill its body. See
969 gcc.c-torture/compile/20011119-1.c. */
970 if (!DECL_STRUCT_FUNCTION (decl))
971 {
972 cgraph_reset_node (node);
973 continue;
974 }
975
976 if (!node->analyzed)
977 cgraph_analyze_function (node);
978
979 for (edge = node->callees; edge; edge = edge->next_callee)
980 if (!edge->callee->reachable)
981 cgraph_mark_reachable_node (edge->callee);
982
983 if (node->same_comdat_group)
984 {
985 for (next = node->same_comdat_group;
986 next != node;
987 next = next->same_comdat_group)
988 cgraph_mark_reachable_node (next);
989 }
990
991 /* If decl is a clone of an abstract function, mark that abstract
992 function so that we don't release its body. The DECL_INITIAL() of that
993 abstract function declaration will be needed later to output debug info. */
994 if (DECL_ABSTRACT_ORIGIN (decl))
995 {
996 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
997 origin_node->abstract_and_needed = true;
998 }
999
1000 /* We finalize local static variables while constructing callgraph
1001 edges. Process their attributes too. */
1002 process_function_and_variable_attributes (first_processed,
1003 first_analyzed_var);
1004 first_processed = cgraph_nodes;
1005 first_analyzed_var = varpool_nodes;
1006 varpool_analyze_pending_decls ();
1007 cgraph_process_new_functions ();
1008 }
1009
1010 /* Collect entry points to the unit. */
1011 if (cgraph_dump_file)
1012 {
1013 fprintf (cgraph_dump_file, "Unit entry points:");
1014 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
1015 if (node->needed)
1016 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1017 fprintf (cgraph_dump_file, "\n\nInitial ");
1018 dump_cgraph (cgraph_dump_file);
1019 dump_varpool (cgraph_dump_file);
1020 }
1021
1022 if (cgraph_dump_file)
1023 fprintf (cgraph_dump_file, "\nReclaiming functions:");
1024
1025 for (node = cgraph_nodes; node != first_analyzed; node = next)
1026 {
1027 tree decl = node->decl;
1028 next = node->next;
1029
1030 if (node->local.finalized && !gimple_has_body_p (decl))
1031 cgraph_reset_node (node);
1032
1033 if (!node->reachable && gimple_has_body_p (decl))
1034 {
1035 if (cgraph_dump_file)
1036 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
1037 cgraph_remove_node (node);
1038 continue;
1039 }
1040 else
1041 node->next_needed = NULL;
1042 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
1043 gcc_assert (node->analyzed == node->local.finalized);
1044 }
1045 if (cgraph_dump_file)
1046 {
1047 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1048 dump_cgraph (cgraph_dump_file);
1049 dump_varpool (cgraph_dump_file);
1050 }
1051 bitmap_obstack_release (NULL);
1052 first_analyzed = cgraph_nodes;
1053 ggc_collect ();
1054 }
1055
1056
1057 /* Analyze the whole compilation unit once it is parsed completely. */
1058
1059 void
1060 cgraph_finalize_compilation_unit (void)
1061 {
1062 timevar_push (TV_CGRAPH);
1063
1064 /* Do not skip analyzing the functions if there were errors; otherwise
1065 we would miss diagnostics for the following functions. */
1066
1067 /* Emit size functions we didn't inline. */
1068 finalize_size_functions ();
1069
1070 /* Mark alias targets necessary and emit diagnostics. */
1071 finish_aliases_1 ();
1072
1073 if (!quiet_flag)
1074 {
1075 fprintf (stderr, "\nAnalyzing compilation unit\n");
1076 fflush (stderr);
1077 }
1078
1079 /* Gimplify and lower all functions, compute reachability and
1080 remove unreachable nodes. */
1081 cgraph_analyze_functions ();
1082
1083 /* Mark alias targets necessary and emit diagnostics. */
1084 finish_aliases_1 ();
1085
1086 /* Gimplify and lower thunks. */
1087 cgraph_analyze_functions ();
1088
1089 /* Finally drive the pass manager. */
1090 cgraph_optimize ();
1091
1092 timevar_pop (TV_CGRAPH);
1093 }
1094
1095
1096 /* Figure out what functions we want to assemble. */
1097
1098 static void
1099 cgraph_mark_functions_to_output (void)
1100 {
1101 struct cgraph_node *node;
1102 #ifdef ENABLE_CHECKING
1103 bool check_same_comdat_groups = false;
1104
1105 for (node = cgraph_nodes; node; node = node->next)
1106 gcc_assert (!node->process);
1107 #endif
1108
1109 for (node = cgraph_nodes; node; node = node->next)
1110 {
1111 tree decl = node->decl;
1112 struct cgraph_edge *e;
1113
1114 gcc_assert (!node->process || node->same_comdat_group);
1115 if (node->process)
1116 continue;
1117
1118 for (e = node->callers; e; e = e->next_caller)
1119 if (e->inline_failed)
1120 break;
1121
1122 /* We need to output all local functions that are used and not
1123 always inlined, as well as those that are reachable from
1124 outside the current compilation unit. */
1125 if (node->analyzed
1126 && !node->global.inlined_to
1127 && (!cgraph_only_called_directly_p (node)
1128 || (e && node->reachable))
1129 && !TREE_ASM_WRITTEN (decl)
1130 && !DECL_EXTERNAL (decl))
1131 {
1132 node->process = 1;
1133 if (node->same_comdat_group)
1134 {
1135 struct cgraph_node *next;
1136 for (next = node->same_comdat_group;
1137 next != node;
1138 next = next->same_comdat_group)
1139 next->process = 1;
1140 }
1141 }
1142 else if (node->same_comdat_group)
1143 {
1144 #ifdef ENABLE_CHECKING
1145 check_same_comdat_groups = true;
1146 #endif
1147 }
1148 else
1149 {
1150 /* We should've reclaimed all functions that are not needed. */
1151 #ifdef ENABLE_CHECKING
1152 if (!node->global.inlined_to
1153 && gimple_has_body_p (decl)
1154 /* FIXME: in an ltrans unit, when the offline copy is outside the partition
1155 but inline copies are inside it, we can end up not removing the body since
1156 we no longer have an analyzed node pointing to it. */
1157 && !node->in_other_partition
1158 && !DECL_EXTERNAL (decl))
1159 {
1160 dump_cgraph_node (stderr, node);
1161 internal_error ("failed to reclaim unneeded function");
1162 }
1163 #endif
1164 gcc_assert (node->global.inlined_to
1165 || !gimple_has_body_p (decl)
1166 || node->in_other_partition
1167 || DECL_EXTERNAL (decl));
1168
1169 }
1170
1171 }
1172 #ifdef ENABLE_CHECKING
1173 if (check_same_comdat_groups)
1174 for (node = cgraph_nodes; node; node = node->next)
1175 if (node->same_comdat_group && !node->process)
1176 {
1177 tree decl = node->decl;
1178 if (!node->global.inlined_to
1179 && gimple_has_body_p (decl)
1180 /* FIXME: in an ltrans unit, when the offline copy is outside the partition
1181 but inline copies are inside it, we can end up not removing the body since
1182 we no longer have an analyzed node pointing to it. */
1183 && !node->in_other_partition
1184 && !DECL_EXTERNAL (decl))
1185 {
1186 dump_cgraph_node (stderr, node);
1187 internal_error ("failed to reclaim unneeded function");
1188 }
1189 }
1190 #endif
1191 }
1192
1193 /* DECL is a FUNCTION_DECL. Initialize data structures so DECL is a function
1194 in lowered GIMPLE form.
1195
1196 Set current_function_decl and cfun to the newly constructed empty function
1197 body. Return the basic block in the function body. */
1198
1199 static basic_block
1200 init_lowered_empty_function (tree decl)
1201 {
1202 basic_block bb;
1203
1204 current_function_decl = decl;
1205 allocate_struct_function (decl, false);
1206 gimple_register_cfg_hooks ();
1207 init_empty_tree_cfg ();
1208 init_tree_ssa (cfun);
1209 init_ssa_operands ();
1210 cfun->gimple_df->in_ssa_p = true;
1211 DECL_INITIAL (decl) = make_node (BLOCK);
1212
1213 DECL_SAVED_TREE (decl) = error_mark_node;
1214 cfun->curr_properties |=
1215 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1216 PROP_ssa);
1217
1218 /* Create BB for body of the function and connect it properly. */
1219 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1220 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1221 make_edge (bb, EXIT_BLOCK_PTR, 0);
1222
1223 return bb;
1224 }
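/* Editorial note: the CFG built above is simply ENTRY_BLOCK -> bb ->
   EXIT_BLOCK; assemble_thunk below uses the returned block as the seed
   into which the thunk's statements are inserted.  */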
1225
1226 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable offset
1227 indicated by VIRTUAL_OFFSET, if that is non-null.
1228 THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1229 zero for a result-adjusting thunk. */
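/* Editorial sketch of the this-adjusting case.  For C++ input with
   multiple inheritance such as

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { void g (); };   // C::g reachable through a B*

   the thunk for B-in-C receives `this' pointing at the embedded B
   subobject and must shift it by the (typically negative) fixed offset
   of B within C so that the body of C::g sees a proper C*.  A virtual
   offset additionally indirects through the vtable, as needed for
   virtual bases, using the vcall offset loaded below.  */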
1230
1231 static tree
1232 thunk_adjust (gimple_stmt_iterator * bsi,
1233 tree ptr, bool this_adjusting,
1234 HOST_WIDE_INT fixed_offset, tree virtual_offset)
1235 {
1236 gimple stmt;
1237 tree ret;
1238
1239 if (this_adjusting
1240 && fixed_offset != 0)
1241 {
1242 stmt = gimple_build_assign (ptr,
1243 fold_build2_loc (input_location,
1244 POINTER_PLUS_EXPR,
1245 TREE_TYPE (ptr), ptr,
1246 size_int (fixed_offset)));
1247 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1248 }
1249
1250 /* If there's a virtual offset, look up that value in the vtable and
1251 adjust the pointer again. */
1252 if (virtual_offset)
1253 {
1254 tree vtabletmp;
1255 tree vtabletmp2;
1256 tree vtabletmp3;
1257 tree offsettmp;
1258
1259 if (!vtable_entry_type)
1260 {
1261 tree vfunc_type = make_node (FUNCTION_TYPE);
1262 TREE_TYPE (vfunc_type) = integer_type_node;
1263 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1264 layout_type (vfunc_type);
1265
1266 vtable_entry_type = build_pointer_type (vfunc_type);
1267 }
1268
1269 vtabletmp =
1270 create_tmp_var (build_pointer_type
1271 (build_pointer_type (vtable_entry_type)), "vptr");
1272
1273 /* The vptr is always at offset zero in the object. */
1274 stmt = gimple_build_assign (vtabletmp,
1275 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1276 ptr));
1277 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1278 mark_symbols_for_renaming (stmt);
1279 find_referenced_vars_in (stmt);
1280
1281 /* Form the vtable address. */
1282 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1283 "vtableaddr");
1284 stmt = gimple_build_assign (vtabletmp2,
1285 build_simple_mem_ref (vtabletmp));
1286 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1287 mark_symbols_for_renaming (stmt);
1288 find_referenced_vars_in (stmt);
1289
1290 /* Find the entry with the vcall offset. */
1291 stmt = gimple_build_assign (vtabletmp2,
1292 fold_build2_loc (input_location,
1293 POINTER_PLUS_EXPR,
1294 TREE_TYPE (vtabletmp2),
1295 vtabletmp2,
1296 fold_convert (sizetype,
1297 virtual_offset)));
1298 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1299
1300 /* Get the offset itself. */
1301 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1302 "vcalloffset");
1303 stmt = gimple_build_assign (vtabletmp3,
1304 build_simple_mem_ref (vtabletmp2));
1305 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1306 mark_symbols_for_renaming (stmt);
1307 find_referenced_vars_in (stmt);
1308
1309 /* Cast to sizetype. */
1310 offsettmp = create_tmp_var (sizetype, "offset");
1311 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1312 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1313 mark_symbols_for_renaming (stmt);
1314 find_referenced_vars_in (stmt);
1315
1316 /* Adjust the `this' pointer. */
1317 ptr = fold_build2_loc (input_location,
1318 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
1319 offsettmp);
1320 }
1321
1322 if (!this_adjusting
1323 && fixed_offset != 0)
1324 /* Adjust the pointer by the constant. */
1325 {
1326 tree ptrtmp;
1327
1328 if (TREE_CODE (ptr) == VAR_DECL)
1329 ptrtmp = ptr;
1330 else
1331 {
1332 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1333 stmt = gimple_build_assign (ptrtmp, ptr);
1334 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1335 mark_symbols_for_renaming (stmt);
1336 find_referenced_vars_in (stmt);
1337 }
1338 ptr = fold_build2_loc (input_location,
1339 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1340 size_int (fixed_offset));
1341 }
1342
1343 /* Emit the statement and gimplify the adjustment expression. */
1344 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1345 stmt = gimple_build_assign (ret, ptr);
1346 mark_symbols_for_renaming (stmt);
1347 find_referenced_vars_in (stmt);
1348 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1349
1350 return ret;
1351 }
1352
1353 /* Produce assembler for thunk NODE. */
1354
1355 static void
1356 assemble_thunk (struct cgraph_node *node)
1357 {
1358 bool this_adjusting = node->thunk.this_adjusting;
1359 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1360 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1361 tree virtual_offset = NULL;
1362 tree alias = node->thunk.alias;
1363 tree thunk_fndecl = node->decl;
1364 tree a = DECL_ARGUMENTS (thunk_fndecl);
1365
1366 current_function_decl = thunk_fndecl;
1367
1368 /* Ensure thunks are emitted in their correct sections. */
1369 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
1370
1371 if (this_adjusting
1372 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1373 virtual_value, alias))
1374 {
1375 const char *fnname;
1376 tree fn_block;
1377
1378 DECL_RESULT (thunk_fndecl)
1379 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1380 RESULT_DECL, 0, integer_type_node);
1381 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1382
1383 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1384 create one. */
1385 fn_block = make_node (BLOCK);
1386 BLOCK_VARS (fn_block) = a;
1387 DECL_INITIAL (thunk_fndecl) = fn_block;
1388 init_function_start (thunk_fndecl);
1389 cfun->is_thunk = 1;
1390 assemble_start_function (thunk_fndecl, fnname);
1391
1392 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1393 fixed_offset, virtual_value, alias);
1394
1395 assemble_end_function (thunk_fndecl, fnname);
1396 init_insn_lengths ();
1397 free_after_compilation (cfun);
1398 set_cfun (NULL);
1399 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1400 }
1401 else
1402 {
1403 tree restype;
1404 basic_block bb, then_bb, else_bb, return_bb;
1405 gimple_stmt_iterator bsi;
1406 int nargs = 0;
1407 tree arg;
1408 int i;
1409 tree resdecl;
1410 tree restmp = NULL;
1411 VEC(tree, heap) *vargs;
1412
1413 gimple call;
1414 gimple ret;
1415
1416 DECL_IGNORED_P (thunk_fndecl) = 1;
1417 bitmap_obstack_initialize (NULL);
1418
1419 if (node->thunk.virtual_offset_p)
1420 virtual_offset = size_int (virtual_value);
1421
1422 /* Build the return declaration for the function. */
1423 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1424 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1425 {
1426 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1427 DECL_ARTIFICIAL (resdecl) = 1;
1428 DECL_IGNORED_P (resdecl) = 1;
1429 DECL_RESULT (thunk_fndecl) = resdecl;
1430 }
1431 else
1432 resdecl = DECL_RESULT (thunk_fndecl);
1433
1434 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1435
1436 bsi = gsi_start_bb (bb);
1437
1438 /* Build call to the function being thunked. */
1439 if (!VOID_TYPE_P (restype))
1440 {
1441 if (!is_gimple_reg_type (restype))
1442 {
1443 restmp = resdecl;
1444 add_local_decl (cfun, restmp);
1445 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1446 }
1447 else
1448 restmp = create_tmp_var_raw (restype, "retval");
1449 }
1450
1451 for (arg = a; arg; arg = DECL_CHAIN (arg))
1452 nargs++;
1453 vargs = VEC_alloc (tree, heap, nargs);
1454 if (this_adjusting)
1455 VEC_quick_push (tree, vargs,
1456 thunk_adjust (&bsi,
1457 a, 1, fixed_offset,
1458 virtual_offset));
1459 else
1460 VEC_quick_push (tree, vargs, a);
1461 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1462 VEC_quick_push (tree, vargs, arg);
1463 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1464 VEC_free (tree, heap, vargs);
1465 gimple_call_set_cannot_inline (call, true);
1466 gimple_call_set_from_thunk (call, true);
1467 if (restmp)
1468 gimple_call_set_lhs (call, restmp);
1469 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1470 mark_symbols_for_renaming (call);
1471 find_referenced_vars_in (call);
1472 update_stmt (call);
1473
1474 if (restmp && !this_adjusting)
1475 {
1476 tree true_label = NULL_TREE;
1477
1478 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1479 {
1480 gimple stmt;
1481 /* If the return type is a pointer, we need to
1482 protect against NULL. We know there will be an
1483 adjustment, because that's why we're emitting a
1484 thunk. */
1485 then_bb = create_basic_block (NULL, (void *) 0, bb);
1486 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1487 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1488 remove_edge (single_succ_edge (bb));
1489 true_label = gimple_block_label (then_bb);
1490 stmt = gimple_build_cond (NE_EXPR, restmp,
1491 build_zero_cst (TREE_TYPE (restmp)),
1492 NULL_TREE, NULL_TREE);
1493 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1494 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1495 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1496 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1497 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1498 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1499 bsi = gsi_last_bb (then_bb);
1500 }
1501
1502 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1503 fixed_offset, virtual_offset);
1504 if (true_label)
1505 {
1506 gimple stmt;
1507 bsi = gsi_last_bb (else_bb);
1508 stmt = gimple_build_assign (restmp,
1509 build_zero_cst (TREE_TYPE (restmp)));
1510 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1511 bsi = gsi_last_bb (return_bb);
1512 }
1513 }
1514 else
1515 gimple_call_set_tail (call, true);
1516
1517 /* Build return value. */
1518 ret = gimple_build_return (restmp);
1519 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1520
1521 delete_unreachable_blocks ();
1522 update_ssa (TODO_update_ssa);
1523
1524 cgraph_remove_same_body_alias (node);
1525 /* Since we want to emit the thunk, we explicitly mark its name as
1526 referenced. */
1527 cgraph_add_new_function (thunk_fndecl, true);
1528 bitmap_obstack_release (NULL);
1529 }
1530 current_function_decl = NULL;
1531 }
1532
1533 /* Expand function specified by NODE. */
1534
1535 static void
1536 cgraph_expand_function (struct cgraph_node *node)
1537 {
1538 tree decl = node->decl;
1539
1540 /* We ought not to compile any inline clones. */
1541 gcc_assert (!node->global.inlined_to);
1542
1543 announce_function (decl);
1544 node->process = 0;
1545 if (node->same_body)
1546 {
1547 struct cgraph_node *alias, *next;
1548 bool saved_alias = node->alias;
1549 for (alias = node->same_body;
1550 alias && alias->next; alias = alias->next)
1551 ;
1552 /* Walk aliases in the order they were created; it is possible that
1553 thunks refer to the aliases made earlier. */
1554 for (; alias; alias = next)
1555 {
1556 next = alias->previous;
1557 if (!alias->thunk.thunk_p)
1558 assemble_alias (alias->decl,
1559 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1560 else
1561 assemble_thunk (alias);
1562 }
1563 node->alias = saved_alias;
1564 cgraph_process_new_functions ();
1565 }
1566
1567 gcc_assert (node->lowered);
1568
1569 /* Generate RTL for the body of DECL. */
1570 tree_rest_of_compilation (decl);
1571
1572 /* Make sure that the back end didn't give up on compiling. */
1573 gcc_assert (TREE_ASM_WRITTEN (decl));
1574 current_function_decl = NULL;
1575 gcc_assert (!cgraph_preserve_function_body_p (decl));
1576 cgraph_release_function_body (node);
1577 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1578 points to the dead function body. */
1579 cgraph_node_remove_callees (node);
1580
1581 cgraph_function_flags_ready = true;
1582 }
1583
1584 /* Return true when the callee of edge E will be inlined into its caller; store the inline_failed reason in *REASON. */
1585
1586 bool
1587 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1588 {
1589 *reason = e->inline_failed;
1590 return !e->inline_failed;
1591 }
1592
1593
1594
1595 /* Expand all functions that must be output.
1596
1597 Attempt to topologically sort the nodes so a function is output when
1598 all its called functions are already assembled, allowing data to be
1599 propagated across the callgraph. Use a stack to get a smaller distance
1600 between a function and its callees (later we may choose to use a more
1601 sophisticated algorithm for function reordering; we will likely want
1602 to use subsections to make the output functions appear in top-down
1603 order). */
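/* Editorial example of the ordering promised above: for a call chain
   main -> f -> g, g is assembled before f and f before main, so data
   recorded while assembling a callee (its final size, for instance) is
   available when its callers are expanded.  */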
1604
1605 static void
1606 cgraph_expand_all_functions (void)
1607 {
1608 struct cgraph_node *node;
1609 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1610 int order_pos, new_order_pos = 0;
1611 int i;
1612
1613 order_pos = cgraph_postorder (order);
1614 gcc_assert (order_pos == cgraph_n_nodes);
1615
1616 /* The garbage collector may remove inline clones we eliminate during
1617 optimization, so we must be sure not to reference them. */
1618 for (i = 0; i < order_pos; i++)
1619 if (order[i]->process)
1620 order[new_order_pos++] = order[i];
1621
1622 for (i = new_order_pos - 1; i >= 0; i--)
1623 {
1624 node = order[i];
1625 if (node->process)
1626 {
1627 gcc_assert (node->reachable);
1628 node->process = 0;
1629 cgraph_expand_function (node);
1630 }
1631 }
1632 cgraph_process_new_functions ();
1633
1634 free (order);
1635
1636 }
1637
1638 /* This is used to sort the node types by the cgraph order number. */
1639
1640 enum cgraph_order_sort_kind
1641 {
1642 ORDER_UNDEFINED = 0,
1643 ORDER_FUNCTION,
1644 ORDER_VAR,
1645 ORDER_ASM
1646 };
1647
1648 struct cgraph_order_sort
1649 {
1650 enum cgraph_order_sort_kind kind;
1651 union
1652 {
1653 struct cgraph_node *f;
1654 struct varpool_node *v;
1655 struct cgraph_asm_node *a;
1656 } u;
1657 };
1658
1659 /* Output all functions, variables, and asm statements in the order
1660 according to their order fields, which is the order in which they
1661 appeared in the file. This implements -fno-toplevel-reorder. In
1662 this mode we may output functions and variables which don't really
1663 need to be output. */
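/* Editorial example: under -fno-toplevel-reorder, a file containing

     int a = 1;
     asm ("# marker");
     static int f (void) { return a; }

   is emitted as variable, asm statement, then function, in exactly that
   sequence, driven by the order fields stamped when each entity was
   created.  */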
1664
1665 static void
1666 cgraph_output_in_order (void)
1667 {
1668 int max;
1669 struct cgraph_order_sort *nodes;
1670 int i;
1671 struct cgraph_node *pf;
1672 struct varpool_node *pv;
1673 struct cgraph_asm_node *pa;
1674
1675 max = cgraph_order;
1676 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1677
1678 varpool_analyze_pending_decls ();
1679
1680 for (pf = cgraph_nodes; pf; pf = pf->next)
1681 {
1682 if (pf->process)
1683 {
1684 i = pf->order;
1685 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1686 nodes[i].kind = ORDER_FUNCTION;
1687 nodes[i].u.f = pf;
1688 }
1689 }
1690
1691 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1692 {
1693 i = pv->order;
1694 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1695 nodes[i].kind = ORDER_VAR;
1696 nodes[i].u.v = pv;
1697 }
1698
1699 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1700 {
1701 i = pa->order;
1702 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1703 nodes[i].kind = ORDER_ASM;
1704 nodes[i].u.a = pa;
1705 }
1706
1707 /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
1708 for (i = 0; i < max; ++i)
1709 {
1710 if (nodes[i].kind == ORDER_VAR)
1711 {
1712 varpool_mark_needed_node (nodes[i].u.v);
1713 }
1714 }
1715 varpool_empty_needed_queue ();
1716
1717 for (i = 0; i < max; ++i)
1718 if (nodes[i].kind == ORDER_VAR)
1719 varpool_finalize_named_section_flags (nodes[i].u.v);
1720
1721 for (i = 0; i < max; ++i)
1722 {
1723 switch (nodes[i].kind)
1724 {
1725 case ORDER_FUNCTION:
1726 nodes[i].u.f->process = 0;
1727 cgraph_expand_function (nodes[i].u.f);
1728 break;
1729
1730 case ORDER_VAR:
1731 varpool_assemble_decl (nodes[i].u.v);
1732 break;
1733
1734 case ORDER_ASM:
1735 assemble_asm (nodes[i].u.a->asm_str);
1736 break;
1737
1738 case ORDER_UNDEFINED:
1739 break;
1740
1741 default:
1742 gcc_unreachable ();
1743 }
1744 }
1745
1746 cgraph_asm_nodes = NULL;
1747 free (nodes);
1748 }
1749
1750 /* Return true when the function body of DECL still needs to be kept around
1751 for later re-use. */
1752 bool
1753 cgraph_preserve_function_body_p (tree decl)
1754 {
1755 struct cgraph_node *node;
1756
1757 gcc_assert (cgraph_global_info_ready);
1758 /* Check whether there is any clone around. */
1759 node = cgraph_node (decl);
1760 if (node->clones)
1761 return true;
1762 return false;
1763 }
1764
1765 static void
1766 ipa_passes (void)
1767 {
1768 set_cfun (NULL);
1769 current_function_decl = NULL;
1770 gimple_register_cfg_hooks ();
1771 bitmap_obstack_initialize (NULL);
1772
1773 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1774
1775 if (!in_lto_p)
1776 {
1777 execute_ipa_pass_list (all_small_ipa_passes);
1778 if (seen_error ())
1779 return;
1780 }
1781
1782 /* If pass_all_early_optimizations was not scheduled, the state of
1783 the cgraph will not be properly updated. Update it now. */
1784 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1785 cgraph_state = CGRAPH_STATE_IPA_SSA;
1786
1787 if (!in_lto_p)
1788 {
1789 /* Generate coverage variables and constructors. */
1790 coverage_finish ();
1791
1792 /* Process new functions added. */
1793 set_cfun (NULL);
1794 current_function_decl = NULL;
1795 cgraph_process_new_functions ();
1796
1797 execute_ipa_summary_passes
1798 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1799 }
1800
1801 /* Some targets need to handle LTO assembler output specially. */
1802 if (flag_generate_lto)
1803 targetm.asm_out.lto_start ();
1804
1805 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1806
1807 if (!in_lto_p)
1808 ipa_write_summaries ();
1809
1810 if (flag_generate_lto)
1811 targetm.asm_out.lto_end ();
1812
1813 if (!flag_ltrans)
1814 execute_ipa_pass_list (all_regular_ipa_passes);
1815 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1816
1817 bitmap_obstack_release (NULL);
1818 }
1819
1820
1821 /* Perform simple optimizations based on callgraph. */
1822
1823 void
1824 cgraph_optimize (void)
1825 {
1826 if (seen_error ())
1827 return;
1828
1829 #ifdef ENABLE_CHECKING
1830 verify_cgraph ();
1831 #endif
1832
1833 /* The front end may output common variables after the unit has been finalized.
1834 It is safe to deal with them here as they are always zero-initialized. */
1835 varpool_analyze_pending_decls ();
1836
1837 timevar_push (TV_CGRAPHOPT);
1838 if (pre_ipa_mem_report)
1839 {
1840 fprintf (stderr, "Memory consumption before IPA\n");
1841 dump_memory_report (false);
1842 }
1843 if (!quiet_flag)
1844 fprintf (stderr, "Performing interprocedural optimizations\n");
1845 cgraph_state = CGRAPH_STATE_IPA;
1846
1847 /* Don't run the IPA passes if there were any error or sorry messages. */
1848 if (!seen_error ())
1849 ipa_passes ();
1850
1851 /* Do nothing else if any IPA pass found errors. */
1852 if (seen_error ())
1853 {
1854 timevar_pop (TV_CGRAPHOPT);
1855 return;
1856 }
1857
1858 /* This pass removes bodies of extern inline functions we never inlined.
1859 Do this later so other IPA passes see what is really going on. */
1860 cgraph_remove_unreachable_nodes (false, dump_file);
1861 cgraph_global_info_ready = true;
1862 if (cgraph_dump_file)
1863 {
1864 fprintf (cgraph_dump_file, "Optimized ");
1865 dump_cgraph (cgraph_dump_file);
1866 dump_varpool (cgraph_dump_file);
1867 }
1868 if (post_ipa_mem_report)
1869 {
1870 fprintf (stderr, "Memory consumption after IPA\n");
1871 dump_memory_report (false);
1872 }
1873 timevar_pop (TV_CGRAPHOPT);
1874
1875 /* Output everything. */
1876 (*debug_hooks->assembly_start) ();
1877 if (!quiet_flag)
1878 fprintf (stderr, "Assembling functions:\n");
1879 #ifdef ENABLE_CHECKING
1880 verify_cgraph ();
1881 #endif
1882
1883 cgraph_materialize_all_clones ();
1884 cgraph_mark_functions_to_output ();
1885
1886 cgraph_state = CGRAPH_STATE_EXPANSION;
1887 if (!flag_toplevel_reorder)
1888 cgraph_output_in_order ();
1889 else
1890 {
1891 cgraph_output_pending_asms ();
1892
1893 cgraph_expand_all_functions ();
1894 varpool_remove_unreferenced_decls ();
1895
1896 varpool_assemble_pending_decls ();
1897 }
1898 cgraph_process_new_functions ();
1899 cgraph_state = CGRAPH_STATE_FINISHED;
1900
1901 if (cgraph_dump_file)
1902 {
1903 fprintf (cgraph_dump_file, "\nFinal ");
1904 dump_cgraph (cgraph_dump_file);
1905 dump_varpool (cgraph_dump_file);
1906 }
1907 #ifdef ENABLE_CHECKING
1908 verify_cgraph ();
1909 /* Double check that all inline clones are gone and that all
1910 function bodies have been released from memory. */
1911 if (!seen_error ())
1912 {
1913 struct cgraph_node *node;
1914 bool error_found = false;
1915
1916 for (node = cgraph_nodes; node; node = node->next)
1917 if (node->analyzed
1918 && (node->global.inlined_to
1919 || gimple_has_body_p (node->decl)))
1920 {
1921 error_found = true;
1922 dump_cgraph_node (stderr, node);
1923 }
1924 if (error_found)
1925 internal_error ("nodes with unreleased memory found");
1926 }
1927 #endif
1928 }
1929
1930 void
1931 init_cgraph (void)
1932 {
1933 if (!cgraph_dump_file)
1934 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1935 }
1936
1937 /* The edges representing the callers of the NEW_VERSION node were
1938 fixed by cgraph_function_versioning (); now the call_expr in their
1939 respective call statements should be updated to call the NEW_VERSION. */
1940
1941 static void
1942 update_call_expr (struct cgraph_node *new_version)
1943 {
1944 struct cgraph_edge *e;
1945
1946 gcc_assert (new_version);
1947
1948 /* Update the call expr on the edges to call the new version. */
1949 for (e = new_version->callers; e; e = e->next_caller)
1950 {
1951 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1952 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1953 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
1954 }
1955 }
1956
1957
1958 /* Create a new cgraph node which is the new version of
1959 OLD_VERSION node. REDIRECT_CALLERS holds the caller
1960 edges which should be redirected to point to
1961 NEW_VERSION. All the callee edges of OLD_VERSION
1962 are cloned to the new version node. Return the new
1963 version node.
1964
1965 If BBS_TO_COPY is non-NULL, it determines which basic blocks
1966 are copied, so that calls that are dead in the clone are not
1967 duplicated. */
1968
1969 static struct cgraph_node *
1970 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1971 tree new_decl,
1972 VEC(cgraph_edge_p,heap) *redirect_callers,
1973 bitmap bbs_to_copy)
1974 {
1975 struct cgraph_node *new_version;
1976 struct cgraph_edge *e;
1977 unsigned i;
1978
1979 gcc_assert (old_version);
1980
1981 new_version = cgraph_node (new_decl);
1982
1983 new_version->analyzed = true;
1984 new_version->local = old_version->local;
1985 new_version->local.externally_visible = false;
1986 new_version->local.local = true;
1987 new_version->local.vtable_method = false;
1988 new_version->global = old_version->global;
1989 new_version->rtl = old_version->rtl;
1990 new_version->reachable = true;
1991 new_version->count = old_version->count;
1992
1993 for (e = old_version->callees; e; e=e->next_callee)
1994 if (!bbs_to_copy
1995 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1996 cgraph_clone_edge (e, new_version, e->call_stmt,
1997 e->lto_stmt_uid, REG_BR_PROB_BASE,
1998 CGRAPH_FREQ_BASE,
1999 e->loop_nest, true);
2000 for (e = old_version->indirect_calls; e; e=e->next_callee)
2001 if (!bbs_to_copy
2002 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
2003 cgraph_clone_edge (e, new_version, e->call_stmt,
2004 e->lto_stmt_uid, REG_BR_PROB_BASE,
2005 CGRAPH_FREQ_BASE,
2006 e->loop_nest, true);
2007 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2008 {
2009 /* Redirect calls to the old version node to point to its new
2010 version. */
2011 cgraph_redirect_edge_callee (e, new_version);
2012 }
2013
2014 return new_version;
2015 }
2016
2017 /* Perform function versioning.
2018 Function versioning includes copying of the tree and
2019 a callgraph update (creating a new cgraph node and updating
2020 its callees and callers).
2021
2022 The REDIRECT_CALLERS vector holds the edges to be redirected
2023 to the new version.
2024
2025 TREE_MAP is a mapping of tree nodes we want to replace with
2026 new ones (according to results of prior analysis).
2027 OLD_VERSION_NODE is the node that is versioned.
2028 It returns the new version's cgraph node.
2029 If ARGS_TO_SKIP is non-NULL, it determines which function parameters
2030 are removed from the new version.
2031 If BBS_TO_COPY is non-NULL, it determines which basic blocks to copy.
2032 If NEW_ENTRY_BLOCK is non-NULL, it determines the new entry BB of the clone. */
2033
2034 struct cgraph_node *
2035 cgraph_function_versioning (struct cgraph_node *old_version_node,
2036 VEC(cgraph_edge_p,heap) *redirect_callers,
2037 VEC (ipa_replace_map_p,gc)* tree_map,
2038 bitmap args_to_skip,
2039 bitmap bbs_to_copy,
2040 basic_block new_entry_block,
2041 const char *clone_name)
2042 {
2043 tree old_decl = old_version_node->decl;
2044 struct cgraph_node *new_version_node = NULL;
2045 tree new_decl;
2046
2047 if (!tree_versionable_function_p (old_decl))
2048 return NULL;
2049
2050 gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
2051
2052 /* Make a new FUNCTION_DECL tree node for the
2053 new version. */
2054 if (!args_to_skip)
2055 new_decl = copy_node (old_decl);
2056 else
2057 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2058
2059 /* Generate a new name for the new version. */
2060 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2061 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2062 SET_DECL_RTL (new_decl, NULL);
2063
2064 /* Create the new version's call-graph node
2065 and update the edges of the new node. */
2066 new_version_node =
2067 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2068 redirect_callers, bbs_to_copy);
2069
2070 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2071 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2072 bbs_to_copy, new_entry_block);
2073
2074 /* Update the new version's properties.
2075 Make the new version visible only within this translation unit; make
2076 sure that it is not weak either.
2077 ??? We cannot use COMDAT linkage because there is no
2078 ABI support for this. */
2079 cgraph_make_decl_local (new_version_node->decl);
2080 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2081 new_version_node->local.externally_visible = 0;
2082 new_version_node->local.local = 1;
2083 new_version_node->lowered = true;
2084
2085 /* Update the call_expr on the edges to call the new version node. */
2086 update_call_expr (new_version_node);
2087
2088 cgraph_call_function_insertion_hooks (new_version_node);
2089 return new_version_node;
2090 }
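/* Editor's illustration (hypothetical caller, not part of this file): an
   IPA pass specializing NODE by dropping its first parameter might do

     bitmap args_to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (args_to_skip, 0);
     struct cgraph_node *clone
       = cgraph_function_versioning (node, redirect_callers,
                                     NULL /* tree_map */, args_to_skip,
                                     NULL /* bbs_to_copy */,
                                     NULL /* new_entry_block */,
                                     "constprop");

   where REDIRECT_CALLERS lists the call edges that should reach the
   specialized body.  */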
2091
2092 /* Produce a separate function body for inline clones so the offline copy can be
2093 modified without affecting them. */
2094 struct cgraph_node *
2095 save_inline_function_body (struct cgraph_node *node)
2096 {
2097 struct cgraph_node *first_clone, *n;
2098
2099 gcc_assert (node == cgraph_node (node->decl));
2100
2101 cgraph_lower_function (node);
2102
2103 first_clone = node->clones;
2104
2105 first_clone->decl = copy_node (node->decl);
2106 cgraph_insert_node_to_hashtable (first_clone);
2107 gcc_assert (first_clone == cgraph_node (first_clone->decl));
2108 if (first_clone->next_sibling_clone)
2109 {
2110 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2111 n->clone_of = first_clone;
2112 n->clone_of = first_clone;
2113 n->next_sibling_clone = first_clone->clones;
2114 if (first_clone->clones)
2115 first_clone->clones->prev_sibling_clone = n;
2116 first_clone->clones = first_clone->next_sibling_clone;
2117 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2118 first_clone->next_sibling_clone = NULL;
2119 gcc_assert (!first_clone->prev_sibling_clone);
2120 }
2121 first_clone->clone_of = NULL;
2122 node->clones = NULL;
2123
2124 if (first_clone->clones)
2125 for (n = first_clone->clones; n != first_clone;)
2126 {
2127 gcc_assert (n->decl == node->decl);
2128 n->decl = first_clone->decl;
2129 if (n->clones)
2130 n = n->clones;
2131 else if (n->next_sibling_clone)
2132 n = n->next_sibling_clone;
2133 else
2134 {
2135 while (n != first_clone && !n->next_sibling_clone)
2136 n = n->clone_of;
2137 if (n != first_clone)
2138 n = n->next_sibling_clone;
2139 }
2140 }
2141
2142 /* Copy NODE's function body to FIRST_CLONE's declaration. */
2143 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
2144 NULL, NULL);
2145
2146 DECL_EXTERNAL (first_clone->decl) = 0;
2147 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
2148 TREE_PUBLIC (first_clone->decl) = 0;
2149 DECL_COMDAT (first_clone->decl) = 0;
2150 VEC_free (ipa_opt_pass, heap,
2151 first_clone->ipa_transforms_to_apply);
2152 first_clone->ipa_transforms_to_apply = NULL;
2153
2154 #ifdef ENABLE_CHECKING
2155 verify_cgraph_node (first_clone);
2156 #endif
2157 return first_clone;
2158 }
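/* Editor's note, sketching the relinking performed above with
   hypothetical clone names:

     before:  node ->clones: first_clone -> c2 -> c3   (sibling list)
     after:   first_clone ->clones: c2 -> c3,  first_clone->clone_of == NULL

   FIRST_CLONE receives a copy of NODE's body and becomes the root of the
   clone tree; the former siblings are reparented under it and their decls
   redirected to FIRST_CLONE->decl, so the offline copy of NODE can later
   be modified without affecting the inline clones.  */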
2159
2160 /* Given a virtual clone, turn it into an actual clone. */
2161 static void
2162 cgraph_materialize_clone (struct cgraph_node *node)
2163 {
2164 bitmap_obstack_initialize (NULL);
2165 node->former_clone_of = node->clone_of->decl;
2166 if (node->clone_of->former_clone_of)
2167 node->former_clone_of = node->clone_of->former_clone_of;
2168 /* Copy the function body of the clone's origin to the clone's declaration. */
2169 tree_function_versioning (node->clone_of->decl, node->decl,
2170 node->clone.tree_map, true,
2171 node->clone.args_to_skip, NULL, NULL);
2172 if (cgraph_dump_file)
2173 {
2174 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2175 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2176 }
2177
2178 /* The function is no longer a clone. */
2179 if (node->next_sibling_clone)
2180 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2181 if (node->prev_sibling_clone)
2182 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2183 else
2184 node->clone_of->clones = node->next_sibling_clone;
2185 node->next_sibling_clone = NULL;
2186 node->prev_sibling_clone = NULL;
2187 if (!node->clone_of->analyzed && !node->clone_of->clones)
2188 {
2189 cgraph_release_function_body (node->clone_of);
2190 cgraph_node_remove_callees (node->clone_of);
2191 ipa_remove_all_references (&node->clone_of->ref_list);
2192 }
2193 node->clone_of = NULL;
2194 bitmap_obstack_release (NULL);
2195 }
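/* Editor's illustration, a hypothetical source-level view of the GIMPLE
   transformation done by cgraph_materialize_clone: a clone whose
   clone.tree_map binds parameter N to the constant 4 and whose
   clone.args_to_skip drops that parameter effectively turns

     int foo (int n) { return n + 1; }

   into

     int foo_constprop (void) { return 4 + 1; }

   with later passes folding the constant.  */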
2196
2197 /* If necessary, change the function declaration in the call statement
2198 associated with E so that it corresponds to the edge callee. */
2199
2200 gimple
2201 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2202 {
2203 tree decl = gimple_call_fndecl (e->call_stmt);
2204 gimple new_stmt;
2205 gimple_stmt_iterator gsi;
2206 bool gsi_computed = false;
2207 #ifdef ENABLE_CHECKING
2208 struct cgraph_node *node;
2209 #endif
2210
2211 if (e->indirect_unknown_callee
2212 || decl == e->callee->decl
2213 /* Don't update a call from a same-body alias to the real function. */
2214 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
2215 return e->call_stmt;
2216
2217 #ifdef ENABLE_CHECKING
2218 if (decl)
2219 {
2220 node = cgraph_get_node (decl);
2221 gcc_assert (!node || !node->clone.combined_args_to_skip);
2222 }
2223 #endif
2224
2225 if (cgraph_dump_file)
2226 {
2227 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2228 cgraph_node_name (e->caller), e->caller->uid,
2229 cgraph_node_name (e->callee), e->callee->uid);
2230 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2231 if (e->callee->clone.combined_args_to_skip)
2232 {
2233 fprintf (cgraph_dump_file, " combined args to skip: ");
2234 dump_bitmap (cgraph_dump_file,
2235 e->callee->clone.combined_args_to_skip);
2236 }
2237 }
2238
2239 if (e->indirect_info
2240 && e->indirect_info->thunk_delta != 0
2241 && (!e->callee->clone.combined_args_to_skip
2242 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2243 {
2244 if (cgraph_dump_file)
2245 fprintf (cgraph_dump_file, " Thunk delta is "
2246 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
2247 gsi = gsi_for_stmt (e->call_stmt);
2248 gsi_computed = true;
2249 gimple_adjust_this_by_delta (&gsi,
2250 build_int_cst (sizetype,
2251 e->indirect_info->thunk_delta));
2252 e->indirect_info->thunk_delta = 0;
2253 }
2254
2255 if (e->callee->clone.combined_args_to_skip)
2256 {
2257 int lp_nr;
2258
2259 new_stmt
2260 = gimple_call_copy_skip_args (e->call_stmt,
2261 e->callee->clone.combined_args_to_skip);
2262 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2263
2264 if (gimple_vdef (new_stmt)
2265 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2266 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
2267
2268 if (!gsi_computed)
2269 gsi = gsi_for_stmt (e->call_stmt);
2270 gsi_replace (&gsi, new_stmt, false);
2271 /* We need to defer cleaning EH info on the new statement to
2272 fixup-cfg. We may not have dominator information at this point
2273 and thus would end up with unreachable blocks and have no way
2274 to communicate that we need to run CFG cleanup then. */
2275 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2276 if (lp_nr != 0)
2277 {
2278 remove_stmt_from_eh_lp (e->call_stmt);
2279 add_stmt_to_eh_lp (new_stmt, lp_nr);
2280 }
2281 }
2282 else
2283 {
2284 new_stmt = e->call_stmt;
2285 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2286 update_stmt (new_stmt);
2287 }
2288
2289 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2290
2291 if (cgraph_dump_file)
2292 {
2293 fprintf (cgraph_dump_file, " updated to:");
2294 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2295 }
2296 return new_stmt;
2297 }
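/* Editor's illustration with hypothetical identifiers: when
   combined_args_to_skip == { 1 }, the rewrite above turns

     x = foo (a, b, c);

   into

     x = foo_clone (a, c);

   via gimple_call_copy_skip_args, moving any EH landing-pad association
   from the old call statement to the new one.  */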
2298
2299 /* Once all functions from the compilation unit are in memory, produce all
2300 clones and update all calls. We might also do this on demand if we don't
2301 want to bring all functions to memory prior to compilation, but the current
2302 WHOPR implementation does that, and it is a bit easier to keep everything
2303 right in this order. */
2304 void
2305 cgraph_materialize_all_clones (void)
2306 {
2307 struct cgraph_node *node;
2308 bool stabilized = false;
2309
2310 if (cgraph_dump_file)
2311 fprintf (cgraph_dump_file, "Materializing clones\n");
2312 #ifdef ENABLE_CHECKING
2313 verify_cgraph ();
2314 #endif
2315
2316 /* We could also do this in topological order, but the number of iterations
2317 should be bounded by the number of IPA passes, since a single IPA pass is
2318 unlikely to create clones of the clones it created itself. */
2319 while (!stabilized)
2320 {
2321 stabilized = true;
2322 for (node = cgraph_nodes; node; node = node->next)
2323 {
2324 if (node->clone_of && node->decl != node->clone_of->decl
2325 && !gimple_has_body_p (node->decl))
2326 {
2327 if (gimple_has_body_p (node->clone_of->decl))
2328 {
2329 if (cgraph_dump_file)
2330 {
2331 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2332 cgraph_node_name (node->clone_of),
2333 cgraph_node_name (node));
2334 if (node->clone.tree_map)
2335 {
2336 unsigned int i;
2337 fprintf (cgraph_dump_file, " replace map: ");
2338 for (i = 0; i < VEC_length (ipa_replace_map_p,
2339 node->clone.tree_map);
2340 i++)
2341 {
2342 struct ipa_replace_map *replace_info;
2343 replace_info = VEC_index (ipa_replace_map_p,
2344 node->clone.tree_map,
2345 i);
2346 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2347 fprintf (cgraph_dump_file, " -> ");
2348 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2349 fprintf (cgraph_dump_file, "%s%s;",
2350 replace_info->replace_p ? "(replace)":"",
2351 replace_info->ref_p ? "(ref)":"");
2352 }
2353 fprintf (cgraph_dump_file, "\n");
2354 }
2355 if (node->clone.args_to_skip)
2356 {
2357 fprintf (cgraph_dump_file, " args_to_skip: ");
2358 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2359 }
2360 if (node->clone.combined_args_to_skip)
2361 {
2362 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2363 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2364 }
2365 }
2366 cgraph_materialize_clone (node);
2367 stabilized = false;
2368 }
2369 }
2370 }
2371 }
2372 for (node = cgraph_nodes; node; node = node->next)
2373 if (!node->analyzed && node->callees)
2374 cgraph_node_remove_callees (node);
2375 if (cgraph_dump_file)
2376 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2377 #ifdef ENABLE_CHECKING
2378 verify_cgraph ();
2379 #endif
2380 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2381 }
2382
2383 #include "gt-cgraphunit.h"