alias-1.c: Require alias support.
[gcc.git] / gcc / cgraphunit.c
1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
24
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
27
28 The front-end is supposed to use following functionality:
29
30 - cgraph_finalize_function
31
32 This function is called once front-end has parsed whole body of function
33 and it is certain that neither the function body nor the declaration will change.
34
35 (There is one exception needed for implementing GCC extern inline
36 function.)
37
38 - varpool_finalize_variable
39
40 This function has same behavior as the above but is used for static
41 variables.
42
43 - cgraph_finalize_compilation_unit
44
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
47
48 Here the call-graph construction and local function
49 analysis take place. Bodies of unreachable functions are released
50 to conserve memory usage.
51
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
54
55 - cgraph_optimize
56
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
61
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
64
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
70
71 - analyze_expr callback
72
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
76
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
79
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
90
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
95
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
98
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
103
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
107
108
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
128 #include "timevar.h"
129 #include "params.h"
130 #include "fibheap.h"
131 #include "intl.h"
132 #include "function.h"
133 #include "ipa-prop.h"
134 #include "gimple.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
138 #include "output.h"
139 #include "coverage.h"
140 #include "plugin.h"
141
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node *);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node *);
147
148 FILE *cgraph_dump_file;
149
150 /* Used for vtable lookup in thunk adjusting. */
151 static GTY (()) tree vtable_entry_type;
152
153 /* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit, something magic in the system
155 configury. */
156
157 bool
158 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
159 {
160 /* If the user told us it is used, then it must be so. */
161 if (node->local.externally_visible)
162 return true;
163
164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
169 return true;
170
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
175 && !DECL_EXTERNAL (decl)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
177 return true;
178
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
182 if (node->needed)
183 return true;
184
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
187
188 When not optimizing, also output the static functions. (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl)
194 || (!optimize
195 && !node->local.disregard_inline_limits
196 && !DECL_DECLARED_INLINE_P (decl)
197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
199 && !flag_whole_program
200 && !flag_lto)
201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
202 return true;
203
204 return false;
205 }
206
207 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
208 functions into callgraph in a way so they look like ordinary reachable
209 functions inserted into callgraph already at construction time. */
210
211 bool
212 cgraph_process_new_functions (void)
213 {
214 bool output = false;
215 tree fndecl;
216 struct cgraph_node *node;
217
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as its being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
222 {
223 node = cgraph_new_nodes;
224 fndecl = node->decl;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
226 switch (cgraph_state)
227 {
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize function and move
230 it into reachable functions list. */
231
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
235 output = true;
236 break;
237
238 case CGRAPH_STATE_IPA:
239 case CGRAPH_STATE_IPA_SSA:
240 /* When IPA optimization already started, do all essential
241 transformations that has been already performed on the whole
242 cgraph but not on this function. */
243
244 gimple_register_cfg_hooks ();
245 if (!node->analyzed)
246 cgraph_analyze_function (node);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
248 current_function_decl = fndecl;
249 compute_inline_parameters (node);
250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
253 to expand OMP. */
254 || !optimize)
255 execute_pass_list (pass_early_local_passes.pass.sub);
256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
258 pop_cfun ();
259 current_function_decl = NULL;
260 break;
261
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
264 directly. */
265 node->process = 0;
266 cgraph_expand_function (node);
267 break;
268
269 default:
270 gcc_unreachable ();
271 break;
272 }
273 cgraph_call_function_insertion_hooks (node);
274 varpool_analyze_pending_decls ();
275 }
276 return output;
277 }
278
279 /* As an GCC extension we allow redefinition of the function. The
280 semantics when both copies of bodies differ is not well defined.
281 We replace the old body with new body so in unit at a time mode
282 we always use new body, while in normal mode we may end up with
283 old body inlined into some functions and new body expanded and
284 inlined in others.
285
286 ??? It may make more sense to use one body for inlining and other
287 body for expanding the function but this is difficult to do. */
288
289 static void
290 cgraph_reset_node (struct cgraph_node *node)
291 {
292 /* If node->process is set, then we have already begun whole-unit analysis.
293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
297 gcc_assert (!node->process);
298
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
306
307 cgraph_node_remove_callees (node);
308
309 /* We may need to re-queue the node for assembling in case
310 we already proceeded it and ignored as not needed or got
311 a re-declaration in IMA mode. */
312 if (node->reachable)
313 {
314 struct cgraph_node *n;
315
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
317 if (n == node)
318 break;
319 if (!n)
320 node->reachable = 0;
321 }
322 }
323
324 static void
325 cgraph_lower_function (struct cgraph_node *node)
326 {
327 if (node->lowered)
328 return;
329
330 if (node->nested)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
333
334 tree_lowering_passes (node->decl);
335 node->lowered = true;
336 }
337
338 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
342
343 void
344 cgraph_finalize_function (tree decl, bool nested)
345 {
346 struct cgraph_node *node = cgraph_node (decl);
347
348 if (node->local.finalized)
349 cgraph_reset_node (node);
350
351 node->pid = cgraph_max_pid ++;
352 notice_global_symbol (decl);
353 node->local.finalized = true;
354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
355 node->finalized_by_frontend = true;
356
357 if (cgraph_decide_is_function_needed (node, decl))
358 cgraph_mark_needed_node (node);
359
360 /* Since we reclaim unreachable nodes at the end of every language
361 level unit, we need to be conservative about possible entry points
362 there. */
363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
365 || DECL_STATIC_DESTRUCTOR (decl)
366 /* COMDAT virtual functions may be referenced by vtable from
367 other compilatoin unit. Still we want to devirtualize calls
368 to those so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
372 cgraph_mark_reachable_node (node);
373
374 /* If we've not yet emitted decl, tell the debug info about it. */
375 if (!TREE_ASM_WRITTEN (decl))
376 (*debug_hooks->deferred_inline_function) (decl);
377
378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
381
382 if (!nested)
383 ggc_collect ();
384 }
385
386 /* C99 extern inline keywords allow changing of declaration after function
387 has been finalized. We need to re-decide if we want to mark the function as
388 needed then. */
389
390 void
391 cgraph_mark_if_needed (tree decl)
392 {
393 struct cgraph_node *node = cgraph_node (decl);
394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
395 cgraph_mark_needed_node (node);
396 }
397
398 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
399 static bool
400 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
401 {
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
405 }
406
407 /* Verify edge E count and frequency. */
408
409 static bool
410 verify_edge_count_and_frequency (struct cgraph_edge *e)
411 {
412 bool error_found = false;
413 if (e->count < 0)
414 {
415 error ("caller edge count is negative");
416 error_found = true;
417 }
418 if (e->frequency < 0)
419 {
420 error ("caller edge frequency is negative");
421 error_found = true;
422 }
423 if (e->frequency > CGRAPH_FREQ_MAX)
424 {
425 error ("caller edge frequency is too large");
426 error_found = true;
427 }
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
430 && (e->frequency
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
433 {
434 error ("caller edge frequency %i does not match BB freqency %i",
435 e->frequency,
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
438 error_found = true;
439 }
440 return error_found;
441 }
442
443 /* Verify cgraph nodes of given cgraph node. */
444 DEBUG_FUNCTION void
445 verify_cgraph_node (struct cgraph_node *node)
446 {
447 struct cgraph_edge *e;
448 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
449 struct function *saved_cfun = cfun;
450 basic_block this_block;
451 gimple_stmt_iterator gsi;
452 bool error_found = false;
453
454 if (seen_error ())
455 return;
456
457 timevar_push (TV_CGRAPH_VERIFY);
458 /* debug_generic_stmt needs correct cfun */
459 set_cfun (this_cfun);
460 for (e = node->callees; e; e = e->next_callee)
461 if (e->aux)
462 {
463 error ("aux field set for edge %s->%s",
464 identifier_to_locale (cgraph_node_name (e->caller)),
465 identifier_to_locale (cgraph_node_name (e->callee)));
466 error_found = true;
467 }
468 if (node->count < 0)
469 {
470 error ("execution count is negative");
471 error_found = true;
472 }
473 if (node->global.inlined_to && node->local.externally_visible)
474 {
475 error ("externally visible inline clone");
476 error_found = true;
477 }
478 if (node->global.inlined_to && node->address_taken)
479 {
480 error ("inline clone with address taken");
481 error_found = true;
482 }
483 if (node->global.inlined_to && node->needed)
484 {
485 error ("inline clone is needed");
486 error_found = true;
487 }
488 for (e = node->indirect_calls; e; e = e->next_callee)
489 {
490 if (e->aux)
491 {
492 error ("aux field set for indirect edge from %s",
493 identifier_to_locale (cgraph_node_name (e->caller)));
494 error_found = true;
495 }
496 if (!e->indirect_unknown_callee
497 || !e->indirect_info)
498 {
499 error ("An indirect edge from %s is not marked as indirect or has "
500 "associated indirect_info, the corresponding statement is: ",
501 identifier_to_locale (cgraph_node_name (e->caller)));
502 debug_gimple_stmt (e->call_stmt);
503 error_found = true;
504 }
505 }
506 for (e = node->callers; e; e = e->next_caller)
507 {
508 if (verify_edge_count_and_frequency (e))
509 error_found = true;
510 if (!e->inline_failed)
511 {
512 if (node->global.inlined_to
513 != (e->caller->global.inlined_to
514 ? e->caller->global.inlined_to : e->caller))
515 {
516 error ("inlined_to pointer is wrong");
517 error_found = true;
518 }
519 if (node->callers->next_caller)
520 {
521 error ("multiple inline callers");
522 error_found = true;
523 }
524 }
525 else
526 if (node->global.inlined_to)
527 {
528 error ("inlined_to pointer set for noninline callers");
529 error_found = true;
530 }
531 }
532 for (e = node->indirect_calls; e; e = e->next_callee)
533 if (verify_edge_count_and_frequency (e))
534 error_found = true;
535 if (!node->callers && node->global.inlined_to)
536 {
537 error ("inlined_to pointer is set but no predecessors found");
538 error_found = true;
539 }
540 if (node->global.inlined_to == node)
541 {
542 error ("inlined_to pointer refers to itself");
543 error_found = true;
544 }
545
546 if (!cgraph_node (node->decl))
547 {
548 error ("node not found in cgraph_hash");
549 error_found = true;
550 }
551
552 if (node->clone_of)
553 {
554 struct cgraph_node *n;
555 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
556 if (n == node)
557 break;
558 if (!n)
559 {
560 error ("node has wrong clone_of");
561 error_found = true;
562 }
563 }
564 if (node->clones)
565 {
566 struct cgraph_node *n;
567 for (n = node->clones; n; n = n->next_sibling_clone)
568 if (n->clone_of != node)
569 break;
570 if (n)
571 {
572 error ("node has wrong clone list");
573 error_found = true;
574 }
575 }
576 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
577 {
578 error ("node is in clone list but it is not clone");
579 error_found = true;
580 }
581 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
582 {
583 error ("node has wrong prev_clone pointer");
584 error_found = true;
585 }
586 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
587 {
588 error ("double linked list of clones corrupted");
589 error_found = true;
590 }
591 if (node->same_comdat_group)
592 {
593 struct cgraph_node *n = node->same_comdat_group;
594
595 if (!DECL_ONE_ONLY (node->decl))
596 {
597 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
598 error_found = true;
599 }
600 if (n == node)
601 {
602 error ("node is alone in a comdat group");
603 error_found = true;
604 }
605 do
606 {
607 if (!n->same_comdat_group)
608 {
609 error ("same_comdat_group is not a circular list");
610 error_found = true;
611 break;
612 }
613 n = n->same_comdat_group;
614 }
615 while (n != node);
616 }
617
618 if (node->analyzed && gimple_has_body_p (node->decl)
619 && !TREE_ASM_WRITTEN (node->decl)
620 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
621 && !flag_wpa)
622 {
623 if (this_cfun->cfg)
624 {
625 /* The nodes we're interested in are never shared, so walk
626 the tree ignoring duplicates. */
627 struct pointer_set_t *visited_nodes = pointer_set_create ();
628 /* Reach the trees by walking over the CFG, and note the
629 enclosing basic-blocks in the call edges. */
630 FOR_EACH_BB_FN (this_block, this_cfun)
631 for (gsi = gsi_start_bb (this_block);
632 !gsi_end_p (gsi);
633 gsi_next (&gsi))
634 {
635 gimple stmt = gsi_stmt (gsi);
636 if (is_gimple_call (stmt))
637 {
638 struct cgraph_edge *e = cgraph_edge (node, stmt);
639 tree decl = gimple_call_fndecl (stmt);
640 if (e)
641 {
642 if (e->aux)
643 {
644 error ("shared call_stmt:");
645 debug_gimple_stmt (stmt);
646 error_found = true;
647 }
648 if (!e->indirect_unknown_callee)
649 {
650 if (e->callee->same_body_alias)
651 {
652 error ("edge points to same body alias:");
653 debug_tree (e->callee->decl);
654 error_found = true;
655 }
656 else if (!e->callee->global.inlined_to
657 && decl
658 && cgraph_get_node (decl)
659 && (e->callee->former_clone_of
660 != cgraph_get_node (decl)->decl)
661 && !clone_of_p (cgraph_node (decl),
662 e->callee))
663 {
664 error ("edge points to wrong declaration:");
665 debug_tree (e->callee->decl);
666 fprintf (stderr," Instead of:");
667 debug_tree (decl);
668 error_found = true;
669 }
670 }
671 else if (decl)
672 {
673 error ("an indirect edge with unknown callee "
674 "corresponding to a call_stmt with "
675 "a known declaration:");
676 error_found = true;
677 debug_gimple_stmt (e->call_stmt);
678 }
679 e->aux = (void *)1;
680 }
681 else if (decl)
682 {
683 error ("missing callgraph edge for call stmt:");
684 debug_gimple_stmt (stmt);
685 error_found = true;
686 }
687 }
688 }
689 pointer_set_destroy (visited_nodes);
690 }
691 else
692 /* No CFG available?! */
693 gcc_unreachable ();
694
695 for (e = node->callees; e; e = e->next_callee)
696 {
697 if (!e->aux)
698 {
699 error ("edge %s->%s has no corresponding call_stmt",
700 identifier_to_locale (cgraph_node_name (e->caller)),
701 identifier_to_locale (cgraph_node_name (e->callee)));
702 debug_gimple_stmt (e->call_stmt);
703 error_found = true;
704 }
705 e->aux = 0;
706 }
707 for (e = node->indirect_calls; e; e = e->next_callee)
708 {
709 if (!e->aux)
710 {
711 error ("an indirect edge from %s has no corresponding call_stmt",
712 identifier_to_locale (cgraph_node_name (e->caller)));
713 debug_gimple_stmt (e->call_stmt);
714 error_found = true;
715 }
716 e->aux = 0;
717 }
718 }
719 if (error_found)
720 {
721 dump_cgraph_node (stderr, node);
722 internal_error ("verify_cgraph_node failed");
723 }
724 set_cfun (saved_cfun);
725 timevar_pop (TV_CGRAPH_VERIFY);
726 }
727
728 /* Verify whole cgraph structure. */
729 DEBUG_FUNCTION void
730 verify_cgraph (void)
731 {
732 struct cgraph_node *node;
733
734 if (seen_error ())
735 return;
736
737 for (node = cgraph_nodes; node; node = node->next)
738 verify_cgraph_node (node);
739 }
740
741 /* Output all asm statements we have stored up to be output. */
742
743 static void
744 cgraph_output_pending_asms (void)
745 {
746 struct cgraph_asm_node *can;
747
748 if (seen_error ())
749 return;
750
751 for (can = cgraph_asm_nodes; can; can = can->next)
752 assemble_asm (can->asm_str);
753 cgraph_asm_nodes = NULL;
754 }
755
756 /* Analyze the function scheduled to be output. */
757 static void
758 cgraph_analyze_function (struct cgraph_node *node)
759 {
760 tree save = current_function_decl;
761 tree decl = node->decl;
762
763 current_function_decl = decl;
764 push_cfun (DECL_STRUCT_FUNCTION (decl));
765
766 assign_assembler_name_if_neeeded (node->decl);
767
768 /* Make sure to gimplify bodies only once. During analyzing a
769 function we lower it, which will require gimplified nested
770 functions, so we can end up here with an already gimplified
771 body. */
772 if (!gimple_body (decl))
773 gimplify_function_tree (decl);
774 dump_function (TDI_generic, decl);
775
776 cgraph_lower_function (node);
777 node->analyzed = true;
778
779 pop_cfun ();
780 current_function_decl = save;
781 }
782
783 /* Look for externally_visible and used attributes and mark cgraph nodes
784 accordingly.
785
786 We cannot mark the nodes at the point the attributes are processed (in
787 handle_*_attribute) because the copy of the declarations available at that
788 point may not be canonical. For example, in:
789
790 void f();
791 void f() __attribute__((used));
792
793 the declaration we see in handle_used_attribute will be the second
794 declaration -- but the front end will subsequently merge that declaration
795 with the original declaration and discard the second declaration.
796
797 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
798
799 void f() {}
800 void f() __attribute__((externally_visible));
801
802 is valid.
803
804 So, we walk the nodes at the end of the translation unit, applying the
805 attributes at that point. */
806
807 static void
808 process_function_and_variable_attributes (struct cgraph_node *first,
809 struct varpool_node *first_var)
810 {
811 struct cgraph_node *node;
812 struct varpool_node *vnode;
813
814 for (node = cgraph_nodes; node != first; node = node->next)
815 {
816 tree decl = node->decl;
817 if (DECL_PRESERVE_P (decl))
818 cgraph_mark_needed_node (node);
819 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
820 {
821 if (! TREE_PUBLIC (node->decl))
822 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
823 "%<externally_visible%>"
824 " attribute have effect only on public objects");
825 else if (node->local.finalized)
826 cgraph_mark_needed_node (node);
827 }
828 }
829 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
830 {
831 tree decl = vnode->decl;
832 if (DECL_PRESERVE_P (decl))
833 {
834 vnode->force_output = true;
835 if (vnode->finalized)
836 varpool_mark_needed_node (vnode);
837 }
838 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
839 {
840 if (! TREE_PUBLIC (vnode->decl))
841 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
842 "%<externally_visible%>"
843 " attribute have effect only on public objects");
844 else if (vnode->finalized)
845 varpool_mark_needed_node (vnode);
846 }
847 }
848 }
849
850 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
851 each reachable functions) and build cgraph.
852 The function can be called multiple times after inserting new nodes
853 into beginning of queue. Just the new part of queue is re-scanned then. */
854
855 static void
856 cgraph_analyze_functions (void)
857 {
858 /* Keep track of already processed nodes when called multiple times for
859 intermodule optimization. */
860 static struct cgraph_node *first_analyzed;
861 struct cgraph_node *first_processed = first_analyzed;
862 static struct varpool_node *first_analyzed_var;
863 struct cgraph_node *node, *next;
864
865 bitmap_obstack_initialize (NULL);
866 process_function_and_variable_attributes (first_processed,
867 first_analyzed_var);
868 first_processed = cgraph_nodes;
869 first_analyzed_var = varpool_nodes;
870 varpool_analyze_pending_decls ();
871 if (cgraph_dump_file)
872 {
873 fprintf (cgraph_dump_file, "Initial entry points:");
874 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
875 if (node->needed)
876 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
877 fprintf (cgraph_dump_file, "\n");
878 }
879 cgraph_process_new_functions ();
880
881 /* Propagate reachability flag and lower representation of all reachable
882 functions. In the future, lowering will introduce new functions and
883 new entry points on the way (by template instantiation and virtual
884 method table generation for instance). */
885 while (cgraph_nodes_queue)
886 {
887 struct cgraph_edge *edge;
888 tree decl = cgraph_nodes_queue->decl;
889
890 node = cgraph_nodes_queue;
891 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
892 node->next_needed = NULL;
893
894 /* ??? It is possible to create extern inline function and later using
895 weak alias attribute to kill its body. See
896 gcc.c-torture/compile/20011119-1.c */
897 if (!DECL_STRUCT_FUNCTION (decl))
898 {
899 cgraph_reset_node (node);
900 continue;
901 }
902
903 if (!node->analyzed)
904 cgraph_analyze_function (node);
905
906 for (edge = node->callees; edge; edge = edge->next_callee)
907 if (!edge->callee->reachable)
908 cgraph_mark_reachable_node (edge->callee);
909
910 if (node->same_comdat_group)
911 {
912 for (next = node->same_comdat_group;
913 next != node;
914 next = next->same_comdat_group)
915 cgraph_mark_reachable_node (next);
916 }
917
918 /* If decl is a clone of an abstract function, mark that abstract
919 function so that we don't release its body. The DECL_INITIAL() of that
920 abstract function declaration will be later needed to output debug info. */
921 if (DECL_ABSTRACT_ORIGIN (decl))
922 {
923 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
924 origin_node->abstract_and_needed = true;
925 }
926
927 /* We finalize local static variables during constructing callgraph
928 edges. Process their attributes too. */
929 process_function_and_variable_attributes (first_processed,
930 first_analyzed_var);
931 first_processed = cgraph_nodes;
932 first_analyzed_var = varpool_nodes;
933 varpool_analyze_pending_decls ();
934 cgraph_process_new_functions ();
935 }
936
937 /* Collect entry points to the unit. */
938 if (cgraph_dump_file)
939 {
940 fprintf (cgraph_dump_file, "Unit entry points:");
941 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
942 if (node->needed)
943 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
944 fprintf (cgraph_dump_file, "\n\nInitial ");
945 dump_cgraph (cgraph_dump_file);
946 dump_varpool (cgraph_dump_file);
947 }
948
949 if (cgraph_dump_file)
950 fprintf (cgraph_dump_file, "\nReclaiming functions:");
951
952 for (node = cgraph_nodes; node != first_analyzed; node = next)
953 {
954 tree decl = node->decl;
955 next = node->next;
956
957 if (node->local.finalized && !gimple_has_body_p (decl))
958 cgraph_reset_node (node);
959
960 if (!node->reachable && gimple_has_body_p (decl))
961 {
962 if (cgraph_dump_file)
963 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
964 cgraph_remove_node (node);
965 continue;
966 }
967 else
968 node->next_needed = NULL;
969 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
970 gcc_assert (node->analyzed == node->local.finalized);
971 }
972 if (cgraph_dump_file)
973 {
974 fprintf (cgraph_dump_file, "\n\nReclaimed ");
975 dump_cgraph (cgraph_dump_file);
976 dump_varpool (cgraph_dump_file);
977 }
978 bitmap_obstack_release (NULL);
979 first_analyzed = cgraph_nodes;
980 ggc_collect ();
981 }
982
983
/* Analyze the whole compilation unit once it is parsed completely.
   This is the front end's entry point into callgraph construction,
   analysis and, finally, code generation (via cgraph_optimize).  */

void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  Run a second
     time so aliases finalized during the analysis above are covered.  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  NOTE(review): presumably this second
     pass picks up thunks and functions queued by the first one --
     confirm against cgraph_analyze_functions.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
1021
1022
1023 /* Figure out what functions we want to assemble. */
1024
1025 static void
1026 cgraph_mark_functions_to_output (void)
1027 {
1028 struct cgraph_node *node;
1029 #ifdef ENABLE_CHECKING
1030 bool check_same_comdat_groups = false;
1031
1032 for (node = cgraph_nodes; node; node = node->next)
1033 gcc_assert (!node->process);
1034 #endif
1035
1036 for (node = cgraph_nodes; node; node = node->next)
1037 {
1038 tree decl = node->decl;
1039 struct cgraph_edge *e;
1040
1041 gcc_assert (!node->process || node->same_comdat_group);
1042 if (node->process)
1043 continue;
1044
1045 for (e = node->callers; e; e = e->next_caller)
1046 if (e->inline_failed)
1047 break;
1048
1049 /* We need to output all local functions that are used and not
1050 always inlined, as well as those that are reachable from
1051 outside the current compilation unit. */
1052 if (node->analyzed
1053 && !node->global.inlined_to
1054 && (!cgraph_only_called_directly_p (node)
1055 || (e && node->reachable))
1056 && !TREE_ASM_WRITTEN (decl)
1057 && !DECL_EXTERNAL (decl))
1058 {
1059 node->process = 1;
1060 if (node->same_comdat_group)
1061 {
1062 struct cgraph_node *next;
1063 for (next = node->same_comdat_group;
1064 next != node;
1065 next = next->same_comdat_group)
1066 next->process = 1;
1067 }
1068 }
1069 else if (node->same_comdat_group)
1070 {
1071 #ifdef ENABLE_CHECKING
1072 check_same_comdat_groups = true;
1073 #endif
1074 }
1075 else
1076 {
1077 /* We should've reclaimed all functions that are not needed. */
1078 #ifdef ENABLE_CHECKING
1079 if (!node->global.inlined_to
1080 && gimple_has_body_p (decl)
1081 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1082 are inside partition, we can end up not removing the body since we no longer
1083 have analyzed node pointing to it. */
1084 && !node->in_other_partition
1085 && !DECL_EXTERNAL (decl))
1086 {
1087 dump_cgraph_node (stderr, node);
1088 internal_error ("failed to reclaim unneeded function");
1089 }
1090 #endif
1091 gcc_assert (node->global.inlined_to
1092 || !gimple_has_body_p (decl)
1093 || node->in_other_partition
1094 || DECL_EXTERNAL (decl));
1095
1096 }
1097
1098 }
1099 #ifdef ENABLE_CHECKING
1100 if (check_same_comdat_groups)
1101 for (node = cgraph_nodes; node; node = node->next)
1102 if (node->same_comdat_group && !node->process)
1103 {
1104 tree decl = node->decl;
1105 if (!node->global.inlined_to
1106 && gimple_has_body_p (decl)
1107 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1108 are inside partition, we can end up not removing the body since we no longer
1109 have analyzed node pointing to it. */
1110 && !node->in_other_partition
1111 && !DECL_EXTERNAL (decl))
1112 {
1113 dump_cgraph_node (stderr, node);
1114 internal_error ("failed to reclaim unneeded function");
1115 }
1116 }
1117 #endif
1118 }
1119
/* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.

   Set current_function_decl and cfun to newly constructed empty function body.
   return basic block in the function body.  */

static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  /* The synthesized body is built directly in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  /* NOTE(review): error_mark_node apparently acts as a "body is
     provided, but not as a tree" marker here -- confirm.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim the properties the passes downstream expect: lowered control
     flow, lowered EH, a CFG, referenced vars and SSA.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1152
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.

   Statements implementing the adjustment are inserted after *BSI; the
   returned tree is a temporary holding the fully adjusted pointer.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk it is applied after
     (see the second fixed_offset block below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily create the type used for vtable entries (pointer to a
	 dummy int-returning function type).  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  NOTE(review): unlike
	 the neighbouring insertions, this one is not followed by
	 mark_symbols_for_renaming/find_referenced_vars_in -- presumably
	 because vtabletmp2 was already registered above; confirm.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      /* Reuse PTR directly when it is already a variable; otherwise
	 capture the expression in a temporary first.  */
      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1279
/* Produce assembler for thunk NODE.

   Two strategies are used: if the target can emit a this-adjusting
   thunk directly (asm_out.can_output_mi_thunk), delegate to the
   target hook; otherwise synthesize the thunk as an ordinary lowered
   GIMPLE function that adjusts the pointer(s) via thunk_adjust and
   tail-calls the thunked-to function.  */

static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      /* Fast path: let the target emit the thunk body directly.  */
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: build a real GIMPLE body for the thunk.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  /* Aggregate results use the RESULT_DECL itself; register-type
	     results go through a temporary.  */
	  if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') is adjusted for a this-adjusting
	 thunk; all remaining arguments pass through unchanged.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      if (restmp && !this_adjusting)
        {
	  /* Result-adjusting thunk: the returned pointer itself needs
	     adjusting, guarded against NULL for pointer returns.  */
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
	      				build_zero_cst (TREE_TYPE (restmp)),
	      			        NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
				 fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      /* On the NULL branch return a zero pointer unadjusted.  */
	      gimple stmt;
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* The thunk is no longer a same-body alias; hand the finished
	 function body to the callgraph for compilation.  */
      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1456
/* Expand function specified by NODE: emit its same-body aliases and
   thunks, generate RTL for the body, then release the body and call
   edges since they are no longer needed.  */

static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Advance to the last entry of the same-body list ...  */
      for (alias = node->same_body;
	   alias && alias->next; alias = alias->next)
	;
      /* Walk aliases in the order they were created; it is possible that
	 thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
	{
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      /* assemble_thunk may clobber the alias flag; restore it.  */
      node->alias = saved_alias;
      cgraph_process_new_functions ();
    }

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1507
1508 /* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
1509
1510 bool
1511 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1512 {
1513 *reason = e->inline_failed;
1514 return !e->inline_failed;
1515 }
1516
1517
1518
1519 /* Expand all functions that must be output.
1520
1521 Attempt to topologically sort the nodes so function is output when
1522 all called functions are already assembled to allow data to be
1523 propagated across the callgraph. Use a stack to get smaller distance
1524 between a function and its callees (later we may choose to use a more
1525 sophisticated algorithm for function reordering; we will likely want
1526 to use subsections to make the output functions appear in top-down
1527 order). */
1528
1529 static void
1530 cgraph_expand_all_functions (void)
1531 {
1532 struct cgraph_node *node;
1533 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1534 int order_pos, new_order_pos = 0;
1535 int i;
1536
1537 order_pos = cgraph_postorder (order);
1538 gcc_assert (order_pos == cgraph_n_nodes);
1539
1540 /* Garbage collector may remove inline clones we eliminate during
1541 optimization. So we must be sure to not reference them. */
1542 for (i = 0; i < order_pos; i++)
1543 if (order[i]->process)
1544 order[new_order_pos++] = order[i];
1545
1546 for (i = new_order_pos - 1; i >= 0; i--)
1547 {
1548 node = order[i];
1549 if (node->process)
1550 {
1551 gcc_assert (node->reachable);
1552 node->process = 0;
1553 cgraph_expand_function (node);
1554 }
1555 }
1556 cgraph_process_new_functions ();
1557
1558 free (order);
1559
1560 }
1561
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,		/* Slot not used by any toplevel entity.  */
  ORDER_FUNCTION,		/* Slot holds a function; see u.f.  */
  ORDER_VAR,			/* Slot holds a variable; see u.v.  */
  ORDER_ASM			/* Slot holds a toplevel asm; see u.a.  */
};

/* One toplevel entity, tagged by KIND; exactly one union member is
   valid, selected by KIND as noted below.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1582
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.  */

static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* Allocate one slot per order number ever handed out; unused slots
     stay ORDER_UNDEFINED (zero-initialized by XCNEWVEC).  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Scatter functions marked for output into their order slots.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Likewise for queued variables ...  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* ... and toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In this no-reorder mode we output all statics; mark them as needed
     so varpool will assemble them.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1669
1670 /* Return true when function body of DECL still needs to be kept around
1671 for later re-use. */
1672 bool
1673 cgraph_preserve_function_body_p (tree decl)
1674 {
1675 struct cgraph_node *node;
1676
1677 gcc_assert (cgraph_global_info_ready);
1678 /* Look if there is any clone around. */
1679 node = cgraph_node (decl);
1680 if (node->clones)
1681 return true;
1682 return false;
1683 }
1684
/* Drive all interprocedural passes: small IPA passes, summary
   generation, LTO stream-out, and the regular IPA passes.  The
   in_lto_p / flag_ltrans checks keep the non-LTO, LTO write, and
   LTO read sides from repeating each other's work.  */

static void
ipa_passes (void)
{
  /* IPA passes run without any particular function being "current".  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* When reading LTO, the small IPA passes already ran at compile
     time.  */
  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      /* Compute summaries so regular IPA passes can run (or be
	 streamed) later.  */
      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* In ltrans mode the regular IPA passes were decided at WPA time.  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1735
1736
/* Perform simple optimizations based on callgraph.  This is the main
   driver: run the IPA passes, reclaim unreachable nodes, then expand
   everything to assembly (in source order for -fno-toplevel-reorder,
   topologically otherwise).  */

void
cgraph_optimize (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (seen_error ())
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Turn virtual clones into real function bodies, then decide which
     nodes actually get assembled.  */
  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
1845
1846 void
1847 init_cgraph (void)
1848 {
1849 if (!cgraph_dump_file)
1850 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1851 }
1852
1853 /* The edges representing the callers of the NEW_VERSION node were
1854 fixed by cgraph_function_versioning (), now the call_expr in their
1855 respective tree code should be updated to call the NEW_VERSION. */
1856
1857 static void
1858 update_call_expr (struct cgraph_node *new_version)
1859 {
1860 struct cgraph_edge *e;
1861
1862 gcc_assert (new_version);
1863
1864 /* Update the call expr on the edges to call the new version. */
1865 for (e = new_version->callers; e; e = e->next_caller)
1866 {
1867 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1868 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
1869 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
1870 }
1871 }
1872
1873
1874 /* Create a new cgraph node which is the new version of
1875 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1876 edges which should be redirected to point to
1877 NEW_VERSION. ALL the callees edges of OLD_VERSION
1878 are cloned to the new version node. Return the new
1879 version node.
1880
1881 If non-NULL BLOCK_TO_COPY determine what basic blocks
1882 was copied to prevent duplications of calls that are dead
1883 in the clone. */
1884
1885 static struct cgraph_node *
1886 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1887 tree new_decl,
1888 VEC(cgraph_edge_p,heap) *redirect_callers,
1889 bitmap bbs_to_copy)
1890 {
1891 struct cgraph_node *new_version;
1892 struct cgraph_edge *e;
1893 unsigned i;
1894
1895 gcc_assert (old_version);
1896
1897 new_version = cgraph_node (new_decl);
1898
1899 new_version->analyzed = true;
1900 new_version->local = old_version->local;
1901 new_version->local.externally_visible = false;
1902 new_version->local.local = true;
1903 new_version->local.vtable_method = false;
1904 new_version->global = old_version->global;
1905 new_version->rtl = old_version->rtl;
1906 new_version->reachable = true;
1907 new_version->count = old_version->count;
1908
1909 for (e = old_version->callees; e; e=e->next_callee)
1910 if (!bbs_to_copy
1911 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1912 cgraph_clone_edge (e, new_version, e->call_stmt,
1913 e->lto_stmt_uid, REG_BR_PROB_BASE,
1914 CGRAPH_FREQ_BASE,
1915 e->loop_nest, true);
1916 for (e = old_version->indirect_calls; e; e=e->next_callee)
1917 if (!bbs_to_copy
1918 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1919 cgraph_clone_edge (e, new_version, e->call_stmt,
1920 e->lto_stmt_uid, REG_BR_PROB_BASE,
1921 CGRAPH_FREQ_BASE,
1922 e->loop_nest, true);
1923 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
1924 {
1925 /* Redirect calls to the old version node to point to its new
1926 version. */
1927 cgraph_redirect_edge_callee (e, new_version);
1928 }
1929
1930 return new_version;
1931 }
1932
/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS varray includes the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).
   OLD_VERSION_NODE is the node that is versioned.
   It returns the new version's cgraph node.
   If non-NULL ARGS_TO_SKIP determine function parameters to remove
   from new version.
   If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
   If non_NULL NEW_ENTRY determine new entry BB of the clone.
   Returns NULL when OLD_VERSION_NODE's decl cannot be versioned.  */

struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
			    VEC(cgraph_edge_p,heap) *redirect_callers,
			    VEC (ipa_replace_map_p,gc)* tree_map,
			    bitmap args_to_skip,
			    bitmap bbs_to_copy,
			    basic_block new_entry_block,
			    const char *clone_name)
{
  tree old_decl = old_version_node->decl;
  struct cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  /* Make a new FUNCTION_DECL tree node for the
     new version. */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);

  /* Generate a new name for the new version. */
  DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* Create the new version's call-graph node.
     and update the edges of the new node. */
  new_version_node =
    cgraph_copy_node_for_versioning (old_version_node, new_decl,
				     redirect_callers, bbs_to_copy);

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make The new version visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  cgraph_make_decl_local (new_version_node->decl);
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->local.externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;

  /* Update the call_expr on the edges to call the new version node. */
  update_call_expr (new_version_node);

  cgraph_call_function_insertion_hooks (new_version_node);
  return new_version_node;
}
2005
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.

   The first clone takes over the (copied) decl and becomes the new
   "master": all remaining clones of NODE are re-parented under it and
   retargeted to the copied decl.  Returns the new master clone.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give the first clone its own decl and register it.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Splice NODE's remaining sibling clones out of NODE's list and
	 onto the front of first_clone's own clone list, making
	 first_clone their clone_of parent.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;
  node->clones = NULL;

  /* Walk the whole clone subtree of first_clone (depth-first, using
     clone_of links to backtrack) and point every clone that still
     referenced NODE's decl at the new copied decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
			    NULL, NULL);

  /* The saved body is a purely local, non-comdat entity.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
2073
/* Given virtual clone NODE, turn it into an actual clone: copy the body of
   its clone origin into NODE's own declaration (applying the recorded
   tree_map replacements and args_to_skip), unlink NODE from the clone list,
   and release the origin's body if nothing needs it any longer.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Record which decl NODE was cloned from; if the origin was itself a
     materialized clone, propagate its original ancestor instead.  */
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, NULL, NULL);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone: unlink NODE from the doubly-linked sibling
     list hanging off its origin.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the origin was never analyzed on its own and NODE was its last
     remaining clone, its body, callees and references are dead weight.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
      ipa_remove_all_references (&node->clone_of->ref_list);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2110
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.

   When the callee skips some arguments (clone.combined_args_to_skip), the
   old call statement is replaced by a new one with those arguments dropped;
   otherwise the existing statement's fndecl is updated in place.  Returns
   the (possibly new) call statement; E->call_stmt and all clone copies of
   the edge are updated to point at it.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;
#ifdef ENABLE_CHECKING
  struct cgraph_node *node;
#endif

  /* Nothing to do for indirect calls, calls already targeting the callee
     decl, or same-body aliases of the real function.  */
  if (e->indirect_unknown_callee
      || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
    return e->call_stmt;

#ifdef ENABLE_CHECKING
  /* A statement still calling a decl with combined_args_to_skip would mean
     an earlier redirection was missed.  */
  if (decl)
    {
      node = cgraph_get_node (decl);
      gcc_assert (!node || !node->clone.combined_args_to_skip);
    }
#endif

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
      if (e->callee->clone.combined_args_to_skip)
	{
	  fprintf (cgraph_dump_file, " combined args to skip: ");
	  dump_bitmap (cgraph_dump_file,
		       e->callee->clone.combined_args_to_skip);
	}
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      gimple_stmt_iterator gsi;
      int lp_nr;

      /* Build a replacement call without the skipped arguments.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);
      gimple_call_set_fndecl (new_stmt, e->callee->decl);

      /* The copy inherits the virtual definition; make the SSA name's
	 def-stmt point at the new statement.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, false);
      /* We need to defer cleaning EH info on the new statement to
	 fixup-cfg.  We may not have dominator information at this point
	 and thus would end up with unreachable blocks and have no way
	 to communicate that we need to run CFG cleanup then.  */
      lp_nr = lookup_stmt_eh_lp (e->call_stmt);
      if (lp_nr != 0)
	{
	  /* Move the EH landing-pad association from the old statement to
	     its replacement.  */
	  remove_stmt_from_eh_lp (e->call_stmt);
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	}
    }
  else
    {
      /* No arguments to drop: just retarget the existing statement.  */
      new_stmt = e->call_stmt;
      gimple_call_set_fndecl (new_stmt, e->callee->decl);
      update_stmt (new_stmt);
    }

  /* Propagate the new statement to this edge and to the corresponding
     edges of all clones of the caller.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, " updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2194
2195 /* Once all functions from compilation unit are in memory, produce all clones
2196 and update all calls. We might also do this on demand if we don't want to
2197 bring all functions to memory prior compilation, but current WHOPR
2198 implementation does that and it is is bit easier to keep everything right in
2199 this order. */
2200 void
2201 cgraph_materialize_all_clones (void)
2202 {
2203 struct cgraph_node *node;
2204 bool stabilized = false;
2205
2206 if (cgraph_dump_file)
2207 fprintf (cgraph_dump_file, "Materializing clones\n");
2208 #ifdef ENABLE_CHECKING
2209 verify_cgraph ();
2210 #endif
2211
2212 /* We can also do topological order, but number of iterations should be
2213 bounded by number of IPA passes since single IPA pass is probably not
2214 going to create clones of clones it created itself. */
2215 while (!stabilized)
2216 {
2217 stabilized = true;
2218 for (node = cgraph_nodes; node; node = node->next)
2219 {
2220 if (node->clone_of && node->decl != node->clone_of->decl
2221 && !gimple_has_body_p (node->decl))
2222 {
2223 if (gimple_has_body_p (node->clone_of->decl))
2224 {
2225 if (cgraph_dump_file)
2226 {
2227 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2228 cgraph_node_name (node->clone_of),
2229 cgraph_node_name (node));
2230 if (node->clone.tree_map)
2231 {
2232 unsigned int i;
2233 fprintf (cgraph_dump_file, " replace map: ");
2234 for (i = 0; i < VEC_length (ipa_replace_map_p,
2235 node->clone.tree_map);
2236 i++)
2237 {
2238 struct ipa_replace_map *replace_info;
2239 replace_info = VEC_index (ipa_replace_map_p,
2240 node->clone.tree_map,
2241 i);
2242 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2243 fprintf (cgraph_dump_file, " -> ");
2244 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2245 fprintf (cgraph_dump_file, "%s%s;",
2246 replace_info->replace_p ? "(replace)":"",
2247 replace_info->ref_p ? "(ref)":"");
2248 }
2249 fprintf (cgraph_dump_file, "\n");
2250 }
2251 if (node->clone.args_to_skip)
2252 {
2253 fprintf (cgraph_dump_file, " args_to_skip: ");
2254 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2255 }
2256 if (node->clone.args_to_skip)
2257 {
2258 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2259 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2260 }
2261 }
2262 cgraph_materialize_clone (node);
2263 stabilized = false;
2264 }
2265 }
2266 }
2267 }
2268 for (node = cgraph_nodes; node; node = node->next)
2269 if (!node->analyzed && node->callees)
2270 cgraph_node_remove_callees (node);
2271 if (cgraph_dump_file)
2272 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2273 #ifdef ENABLE_CHECKING
2274 verify_cgraph ();
2275 #endif
2276 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2277 }
2278
2279 #include "gt-cgraphunit.h"