1 /* Callgraph clones
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This module provides facilities for cloning functions, i.e. creating
22 new functions based on existing functions with simple modifications,
23 such as replacement of parameters.
24
25 To allow whole program optimization without actual presence of function
26 bodies, an additional infrastructure is provided for so-called virtual
27 clones.
28
29 A virtual clone in the callgraph is a function that has no
30 associated body, just a description of how to create its body based
31 on a different function (which itself may be a virtual clone).
32
33 The description of function modifications includes adjustments to
34 the function's signature (which allows, for example, removing or
35 adding function arguments), substitutions to perform on the
36 function body, and, for inlined functions, a pointer to the
37 function that it will be inlined into.
38
39 It is also possible to redirect any edge of the callgraph from a
40 function to its virtual clone. This implies updating of the call
41 site to adjust for the new function signature.
42
43 Most of the transformations performed by inter-procedural
44 optimizations can be represented via virtual clones. For
45 instance, a constant propagation pass can produce a virtual clone
46 of the function which replaces one of its arguments by a
47 constant. The inliner can represent its decisions by producing a
48 clone of a function whose body will be later integrated into
49 a given function.
50
51 Using virtual clones, the program can be easily updated
52 during the Execute stage, solving most of the pass interaction
53 problems that would otherwise occur during Transform.
54
55 Virtual clones are later materialized in the LTRANS stage and
56 turned into real functions. Passes executed after the virtual
57 clone was introduced also perform their Transform stage
58 on new functions, so for a pass there is no significant
59 difference between operating on a real function or a virtual
60 clone introduced before its Execute stage.
61
62 Optimization passes then work on virtual clones introduced before
63 their Execute stage as if they were real functions. The
64 only difference is that clones are not visible during the
65 Generate Summary stage. */
66
67 #include "config.h"
68 #include "system.h"
69 #include "coretypes.h"
70 #include "tm.h"
71 #include "rtl.h"
72 #include "alias.h"
73 #include "symtab.h"
74 #include "tree.h"
75 #include "fold-const.h"
76 #include "stringpool.h"
77 #include "hard-reg-set.h"
78 #include "function.h"
79 #include "emit-rtl.h"
80 #include "predict.h"
81 #include "basic-block.h"
82 #include "tree-ssa-alias.h"
83 #include "internal-fn.h"
84 #include "tree-eh.h"
85 #include "gimple-expr.h"
86 #include "gimple.h"
87 #include "bitmap.h"
88 #include "tree-cfg.h"
89 #include "tree-inline.h"
90 #include "langhooks.h"
91 #include "toplev.h"
92 #include "flags.h"
93 #include "debug.h"
94 #include "target.h"
95 #include "diagnostic.h"
96 #include "params.h"
97 #include "intl.h"
98 #include "cgraph.h"
99 #include "alloc-pool.h"
100 #include "symbol-summary.h"
101 #include "ipa-prop.h"
102 #include "tree-iterator.h"
103 #include "tree-dump.h"
104 #include "gimple-pretty-print.h"
105 #include "coverage.h"
106 #include "ipa-inline.h"
107 #include "ipa-utils.h"
108 #include "lto-streamer.h"
109 #include "except.h"
110
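/* Illustrative sketch only (kept under "#if 0", not compiled): roughly how an
   IPA pass might request a virtual clone through the interface defined later
   in this file.  The wrapper function and the "example" suffix are
   hypothetical; the real users are the IPA passes such as ipa-cp.  */
#if 0
static cgraph_node *
example_create_virtual_clone (cgraph_node *node,
			      vec<ipa_replace_map *, va_gc> *tree_map,
			      bitmap args_to_skip)
{
  /* Redirect every existing caller to the clone that is about to be made.  */
  vec<cgraph_edge *> redirect_callers = vNULL;
  for (cgraph_edge *e = node->callers; e; e = e->next_caller)
    redirect_callers.safe_push (e);

  /* No body is copied here; the clone is materialized later
     (see symbol_table::materialize_all_clones below).  */
  cgraph_node *clone
    = node->create_virtual_clone (redirect_callers, tree_map,
				  args_to_skip, "example");
  redirect_callers.release ();
  return clone;
}
#endif
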
111 /* Create a clone of this edge in the callgraph node N, represented by
112 CALL_STMT.  */
113
114 cgraph_edge *
115 cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
116 gcov_type count_scale, int freq_scale, bool update_original)
117 {
118 cgraph_edge *new_edge;
119 gcov_type gcov_count = apply_probability (count, count_scale);
120 gcov_type freq;
121
122 /* We do not want to ignore loop nest after frequency drops to 0. */
123 if (!freq_scale)
124 freq_scale = 1;
125 freq = frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
126 if (freq > CGRAPH_FREQ_MAX)
127 freq = CGRAPH_FREQ_MAX;
128
129 if (indirect_unknown_callee)
130 {
131 tree decl;
132
133 if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
134 /* When the call is speculative, we need to resolve it
135 via cgraph_resolve_speculation and not here. */
136 && !speculative)
137 {
138 cgraph_node *callee = cgraph_node::get (decl);
139 gcc_checking_assert (callee);
140 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
141 }
142 else
143 {
144 new_edge = n->create_indirect_edge (call_stmt,
145 indirect_info->ecf_flags,
146 count, freq, false);
147 *new_edge->indirect_info = *indirect_info;
148 }
149 }
150 else
151 {
152 new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
153 if (indirect_info)
154 {
155 new_edge->indirect_info
156 = ggc_cleared_alloc<cgraph_indirect_call_info> ();
157 *new_edge->indirect_info = *indirect_info;
158 }
159 }
160
161 new_edge->inline_failed = inline_failed;
162 new_edge->indirect_inlining_edge = indirect_inlining_edge;
163 new_edge->lto_stmt_uid = stmt_uid;
164 /* Manually clone the flags that depend on call_stmt availability.  */
165 new_edge->can_throw_external = can_throw_external;
166 new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
167 new_edge->speculative = speculative;
168 new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
169 if (update_original)
170 {
171 count -= new_edge->count;
172 if (count < 0)
173 count = 0;
174 }
175 symtab->call_edge_duplication_hooks (this, new_edge);
176 return new_edge;
177 }
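
/* Worked example (illustrative only): with this->frequency == 600 and
   FREQ_SCALE == CGRAPH_FREQ_BASE / 2, the clone above gets
   freq == 600 * (CGRAPH_FREQ_BASE / 2) / CGRAPH_FREQ_BASE == 300,
   capped at CGRAPH_FREQ_MAX; COUNT_SCALE similarly rescales the profile
   count through apply_probability.  */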
178
179 /* Build a variant of function type ORIG_TYPE that omits the arguments set in
180 ARGS_TO_SKIP and, if SKIP_RETURN is true, also the return value. */
181
182 static tree
183 build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
184 bool skip_return)
185 {
186 tree new_type = NULL;
187 tree args, new_args = NULL;
188 tree new_reversed;
189 int i = 0;
190
191 for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
192 args = TREE_CHAIN (args), i++)
193 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
194 new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);
195
196 new_reversed = nreverse (new_args);
197 if (args)
198 {
199 if (new_reversed)
200 TREE_CHAIN (new_args) = void_list_node;
201 else
202 new_reversed = void_list_node;
203 }
204
205 /* Use copy_node to preserve as much as possible from the original type
206 (debug info, attribute lists etc.).
207 The exception is that a METHOD_TYPE must have a THIS argument; when we
208 are asked to remove it, we need to build a new FUNCTION_TYPE
209 instead. */
210 if (TREE_CODE (orig_type) != METHOD_TYPE
211 || !args_to_skip
212 || !bitmap_bit_p (args_to_skip, 0))
213 {
214 new_type = build_distinct_type_copy (orig_type);
215 TYPE_ARG_TYPES (new_type) = new_reversed;
216 }
217 else
218 {
219 new_type
220 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
221 new_reversed));
222 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
223 }
224
225 if (skip_return)
226 TREE_TYPE (new_type) = void_type_node;
227
228 return new_type;
229 }
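
/* Worked example (illustrative only): for ORIG_TYPE int (int, double, char),
   an ARGS_TO_SKIP bitmap with only bit 1 set yields int (int, char); if
   SKIP_RETURN is also true, the result becomes void (int, char).  */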
230
231 /* Build a variant of function decl ORIG_DECL that omits the arguments set in
232 ARGS_TO_SKIP and, if SKIP_RETURN is true, also the return value.
233
234 Arguments from DECL_ARGUMENTS list can't be removed now, since they are
235 linked by TREE_CHAIN directly. The caller is responsible for eliminating
236 them when they are being duplicated (i.e. copy_arguments_for_versioning). */
237
238 static tree
239 build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
240 bool skip_return)
241 {
242 tree new_decl = copy_node (orig_decl);
243 tree new_type;
244
245 new_type = TREE_TYPE (orig_decl);
246 if (prototype_p (new_type)
247 || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
248 new_type
249 = build_function_type_skip_args (new_type, args_to_skip, skip_return);
250 TREE_TYPE (new_decl) = new_type;
251
252 /* For declarations setting DECL_VINDEX (i.e. methods)
253 we expect the first argument to be the THIS pointer. */
254 if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
255 DECL_VINDEX (new_decl) = NULL_TREE;
256
257 /* When signature changes, we need to clear builtin info. */
258 if (DECL_BUILT_IN (new_decl)
259 && args_to_skip
260 && !bitmap_empty_p (args_to_skip))
261 {
262 DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
263 DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
264 }
265 /* The FE might have information and assumptions about the other
266 arguments. */
267 DECL_LANG_SPECIFIC (new_decl) = NULL;
268 return new_decl;
269 }
270
271 /* Set flags of NEW_NODE and its decl. NEW_NODE is a newly created private
272 clone or its thunk. */
273
274 static void
275 set_new_clone_decl_and_node_flags (cgraph_node *new_node)
276 {
277 DECL_EXTERNAL (new_node->decl) = 0;
278 TREE_PUBLIC (new_node->decl) = 0;
279 DECL_COMDAT (new_node->decl) = 0;
280 DECL_WEAK (new_node->decl) = 0;
281 DECL_VIRTUAL_P (new_node->decl) = 0;
282 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
283 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
284
285 new_node->externally_visible = 0;
286 new_node->local.local = 1;
287 new_node->lowered = true;
288 }
289
290 /* Duplicate thunk THUNK if necessary but make it refer to NODE.
291 NODE's clone.args_to_skip, if non-NULL, determines which parameters are omitted.
292 The function can return NODE itself if no thunk is necessary, which can happen
293 when the thunk is this_adjusting but we are removing the THIS parameter. */
294
295 static cgraph_node *
296 duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
297 {
298 cgraph_node *new_thunk, *thunk_of;
299 thunk_of = thunk->callees->callee->ultimate_alias_target ();
300
301 if (thunk_of->thunk.thunk_p)
302 node = duplicate_thunk_for_node (thunk_of, node);
303
304 if (!DECL_ARGUMENTS (thunk->decl))
305 thunk->get_untransformed_body ();
306
307 cgraph_edge *cs;
308 for (cs = node->callers; cs; cs = cs->next_caller)
309 if (cs->caller->thunk.thunk_p
310 && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
311 && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
312 && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
313 && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
314 return cs->caller;
315
316 tree new_decl;
317 if (!node->clone.args_to_skip)
318 new_decl = copy_node (thunk->decl);
319 else
320 {
321 /* We do not need to duplicate this_adjusting thunks if we have removed
322 the THIS parameter. */
323 if (thunk->thunk.this_adjusting
324 && bitmap_bit_p (node->clone.args_to_skip, 0))
325 return node;
326
327 new_decl = build_function_decl_skip_args (thunk->decl,
328 node->clone.args_to_skip,
329 false);
330 }
331
332 tree *link = &DECL_ARGUMENTS (new_decl);
333 int i = 0;
334 for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
335 {
336 if (!node->clone.args_to_skip
337 || !bitmap_bit_p (node->clone.args_to_skip, i))
338 {
339 tree nd = copy_node (pd);
340 DECL_CONTEXT (nd) = new_decl;
341 *link = nd;
342 link = &DECL_CHAIN (nd);
343 }
344 }
345 *link = NULL_TREE;
346
347 gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
348 gcc_checking_assert (!DECL_INITIAL (new_decl));
349 gcc_checking_assert (!DECL_RESULT (new_decl));
350 gcc_checking_assert (!DECL_RTL_SET_P (new_decl));
351
352 DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
353 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
354
355 new_thunk = cgraph_node::create (new_decl);
356 set_new_clone_decl_and_node_flags (new_thunk);
357 new_thunk->definition = true;
358 new_thunk->thunk = thunk->thunk;
359 new_thunk->unique_name = in_lto_p;
360 new_thunk->former_clone_of = thunk->decl;
361 new_thunk->clone.args_to_skip = node->clone.args_to_skip;
362 new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
363
364 cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
365 CGRAPH_FREQ_BASE);
366 e->call_stmt_cannot_inline_p = true;
367 symtab->call_edge_duplication_hooks (thunk->callees, e);
368 symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
369 return new_thunk;
370 }
371
372 /* If E does not lead to a thunk, simply redirect it to N. Otherwise create
373 one or more equivalent thunks for N and redirect E to the first in the
374 chain. Note that it is then necessary to call
375 n->expand_all_artificial_thunks once all callers are redirected. */
376
377 void
378 cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
379 {
380 cgraph_node *orig_to = callee->ultimate_alias_target ();
381 if (orig_to->thunk.thunk_p)
382 n = duplicate_thunk_for_node (orig_to, n);
383
384 redirect_callee (n);
385 }
386
387 /* Call expand_thunk on all callers that are thunks and analyze those nodes
388 that were expanded. */
389
390 void
391 cgraph_node::expand_all_artificial_thunks ()
392 {
393 cgraph_edge *e;
394 for (e = callers; e;)
395 if (e->caller->thunk.thunk_p)
396 {
397 cgraph_node *thunk = e->caller;
398
399 e = e->next_caller;
400 if (thunk->expand_thunk (false, false))
401 {
402 thunk->thunk.thunk_p = false;
403 thunk->analyze ();
404 }
405 thunk->expand_all_artificial_thunks ();
406 }
407 else
408 e = e->next_caller;
409 }
410
411 /* Create a node representing a clone of this node executed GCOV_COUNT times.
412 Decrease the execution counts of the original node accordingly.
413 The new clone will have its decl set to NEW_DECL, which may or may not be
414 the same as the decl of this node.
415
416 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
417 function's profile to reflect the fact that part of the execution is handled
418 by the clone.
419 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified of
420 the new clone. Otherwise the caller is responsible for doing so later.
421
422 If the new node is being inlined into another one, NEW_INLINED_TO should be
423 the function the new one is (even indirectly) inlined into. All hooks
424 will see this in the node's global.inlined_to when invoked. It can be NULL
425 if the node is not inlined. */
426
427 cgraph_node *
428 cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
429 bool update_original,
430 vec<cgraph_edge *> redirect_callers,
431 bool call_duplication_hook,
432 cgraph_node *new_inlined_to,
433 bitmap args_to_skip)
434 {
435 cgraph_node *new_node = symtab->create_empty ();
436 cgraph_edge *e;
437 gcov_type count_scale;
438 unsigned i;
439
440 new_node->decl = new_decl;
441 new_node->register_symbol ();
442 new_node->origin = origin;
443 new_node->lto_file_data = lto_file_data;
444 if (new_node->origin)
445 {
446 new_node->next_nested = new_node->origin->nested;
447 new_node->origin->nested = new_node;
448 }
449 new_node->analyzed = analyzed;
450 new_node->definition = definition;
451 new_node->local = local;
452 new_node->externally_visible = false;
453 new_node->no_reorder = no_reorder;
454 new_node->local.local = true;
455 new_node->global = global;
456 new_node->global.inlined_to = new_inlined_to;
457 new_node->rtl = rtl;
458 new_node->count = count;
459 new_node->frequency = frequency;
460 new_node->tp_first_run = tp_first_run;
461 new_node->tm_clone = tm_clone;
462 new_node->icf_merged = icf_merged;
463 new_node->merged = merged;
464
465 new_node->clone.tree_map = NULL;
466 new_node->clone.args_to_skip = args_to_skip;
467 new_node->split_part = split_part;
468 if (!args_to_skip)
469 new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
470 else if (clone.combined_args_to_skip)
471 {
472 new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
473 bitmap_ior (new_node->clone.combined_args_to_skip,
474 clone.combined_args_to_skip, args_to_skip);
475 }
476 else
477 new_node->clone.combined_args_to_skip = args_to_skip;
478
479 if (count)
480 {
481 if (new_node->count > count)
482 count_scale = REG_BR_PROB_BASE;
483 else
484 count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
485 }
486 else
487 count_scale = 0;
488 if (update_original)
489 {
490 count -= gcov_count;
491 if (count < 0)
492 count = 0;
493 }
494
495 FOR_EACH_VEC_ELT (redirect_callers, i, e)
496 {
497 /* Redirect calls to the old version node to point to its new
498 version. The only exception is when the edge was proved to
499 be unreachable during the cloning procedure. */
500 if (!e->callee
501 || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
502 || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
503 e->redirect_callee_duplicating_thunks (new_node);
504 }
505 new_node->expand_all_artificial_thunks ();
506
507 for (e = callees; e; e = e->next_callee)
508 e->clone (new_node, e->call_stmt, e->lto_stmt_uid, count_scale,
509 freq, update_original);
510
511 for (e = indirect_calls; e; e = e->next_callee)
512 e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
513 count_scale, freq, update_original);
514 new_node->clone_references (this);
515
516 new_node->next_sibling_clone = clones;
517 if (clones)
518 clones->prev_sibling_clone = new_node;
519 clones = new_node;
520 new_node->clone_of = this;
521
522 if (call_duplication_hook)
523 symtab->call_cgraph_duplication_hooks (this, new_node);
524 return new_node;
525 }
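
/* Note on the clone tree built above (illustrative): clones are pushed at the
   head of the CLONES list, so after creating clones A and then B of node N we
   have N->clones == B, B->next_sibling_clone == A, and both A->clone_of and
   B->clone_of == N.  */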
526
527 static GTY(()) unsigned int clone_fn_id_num;
528
529 /* Return a new assembler name for a clone with SUFFIX of a decl named
530 NAME. */
531
532 tree
533 clone_function_name_1 (const char *name, const char *suffix)
534 {
535 size_t len = strlen (name);
536 char *tmp_name, *prefix;
537
538 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
539 memcpy (prefix, name, len);
540 strcpy (prefix + len + 1, suffix);
541 #ifndef NO_DOT_IN_LABEL
542 prefix[len] = '.';
543 #elif !defined NO_DOLLAR_IN_LABEL
544 prefix[len] = '$';
545 #else
546 prefix[len] = '_';
547 #endif
548 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
549 return get_identifier (tmp_name);
550 }
551
552 /* Return a new assembler name for a clone of DECL with SUFFIX. */
553
554 tree
555 clone_function_name (tree decl, const char *suffix)
556 {
557 tree name = DECL_ASSEMBLER_NAME (decl);
558 return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
559 }
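
/* Example (illustrative): for a decl whose assembler name is "foo" and SUFFIX
   "constprop", the two functions above typically produce an identifier along
   the lines of "foo.constprop.N" (with '$' or '_' separators on targets that
   do not allow a dot in labels), where N comes from clone_fn_id_num via
   ASM_FORMAT_PRIVATE_NAME.  */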
560
561
562 /* Create a callgraph node clone with a new declaration. The actual body will
563 be copied later, at the compilation stage.
564
565 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
566 bitmap interface.
567 */
568 cgraph_node *
569 cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
570 vec<ipa_replace_map *, va_gc> *tree_map,
571 bitmap args_to_skip, const char * suffix)
572 {
573 tree old_decl = decl;
574 cgraph_node *new_node = NULL;
575 tree new_decl;
576 size_t len, i;
577 ipa_replace_map *map;
578 char *name;
579
580 if (!in_lto_p)
581 gcc_checking_assert (tree_versionable_function_p (old_decl));
582
583 gcc_assert (local.can_change_signature || !args_to_skip);
584
585 /* Make a new FUNCTION_DECL tree node */
586 if (!args_to_skip)
587 new_decl = copy_node (old_decl);
588 else
589 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
590
591 /* These pointers represent the function body and will be populated only when
592 the clone is materialized. */
593 gcc_assert (new_decl != old_decl);
594 DECL_STRUCT_FUNCTION (new_decl) = NULL;
595 DECL_ARGUMENTS (new_decl) = NULL;
596 DECL_INITIAL (new_decl) = NULL;
597 DECL_RESULT (new_decl) = NULL;
598 /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO partitioning
599 sometimes storing only clone decl instead of original. */
600
601 /* Generate a new name for the new version. */
602 len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
603 name = XALLOCAVEC (char, len + strlen (suffix) + 2);
604 memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
605 strcpy (name + len + 1, suffix);
606 name[len] = '.';
607 DECL_NAME (new_decl) = get_identifier (name);
608 SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
609 SET_DECL_RTL (new_decl, NULL);
610
611 new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
612 redirect_callers, false, NULL, args_to_skip);
613
614 /* Update the properties.
615 Make the clone visible only within this translation unit. Make sure
616 that it is not weak either.
617 ??? We cannot use COMDAT linkage because there is no
618 ABI support for this. */
619 set_new_clone_decl_and_node_flags (new_node);
620 new_node->clone.tree_map = tree_map;
621 if (!implicit_section)
622 new_node->set_section (get_section ());
623
624 /* Clones of global symbols or symbols with unique names are unique. */
625 if ((TREE_PUBLIC (old_decl)
626 && !DECL_EXTERNAL (old_decl)
627 && !DECL_WEAK (old_decl)
628 && !DECL_COMDAT (old_decl))
629 || in_lto_p)
630 new_node->unique_name = true;
631 FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
632 new_node->maybe_create_reference (map->new_tree, IPA_REF_ADDR, NULL);
633
634 if (ipa_transforms_to_apply.exists ())
635 new_node->ipa_transforms_to_apply
636 = ipa_transforms_to_apply.copy ();
637
638 symtab->call_cgraph_duplication_hooks (this, new_node);
639
640 return new_node;
641 }
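
/* Illustrative sketch only (kept under "#if 0", not compiled): building a
   TREE_MAP entry that substitutes the constant CST for the tree OLD_VAL in a
   clone.  Only the ipa_replace_map fields this file itself reads (old_tree,
   new_tree, replace_p, ref_p) are shown; any other setup a real pass needs is
   omitted, and the helper name is hypothetical.  */
#if 0
static vec<ipa_replace_map *, va_gc> *
example_build_tree_map (tree old_val, tree cst)
{
  vec<ipa_replace_map *, va_gc> *tree_map = NULL;
  ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
  map->old_tree = old_val;
  map->new_tree = cst;
  map->replace_p = true;
  map->ref_p = false;
  vec_safe_push (tree_map, map);
  return tree_map;
}
#endif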
642
643 /* The callgraph node is being removed from the symbol table; see if its entry
644 can be replaced by another inline clone. */
645 cgraph_node *
646 cgraph_node::find_replacement (void)
647 {
648 cgraph_node *next_inline_clone, *replacement;
649
650 for (next_inline_clone = clones;
651 next_inline_clone
652 && next_inline_clone->decl != decl;
653 next_inline_clone = next_inline_clone->next_sibling_clone)
654 ;
655
656 /* If there is an inline clone of the node being removed, we need
657 to put it into the position of the removed node and reorganize all
658 other clones to be based on it. */
659 if (next_inline_clone)
660 {
661 cgraph_node *n;
662 cgraph_node *new_clones;
663
664 replacement = next_inline_clone;
665
666 /* Unlink inline clone from the list of clones of removed node. */
667 if (next_inline_clone->next_sibling_clone)
668 next_inline_clone->next_sibling_clone->prev_sibling_clone
669 = next_inline_clone->prev_sibling_clone;
670 if (next_inline_clone->prev_sibling_clone)
671 {
672 gcc_assert (clones != next_inline_clone);
673 next_inline_clone->prev_sibling_clone->next_sibling_clone
674 = next_inline_clone->next_sibling_clone;
675 }
676 else
677 {
678 gcc_assert (clones == next_inline_clone);
679 clones = next_inline_clone->next_sibling_clone;
680 }
681
682 new_clones = clones;
683 clones = NULL;
684
685 /* Copy clone info. */
686 next_inline_clone->clone = clone;
687
688 /* Now place it into the clone tree at the same level as this node. */
689 next_inline_clone->clone_of = clone_of;
690 next_inline_clone->prev_sibling_clone = NULL;
691 next_inline_clone->next_sibling_clone = NULL;
692 if (clone_of)
693 {
694 if (clone_of->clones)
695 clone_of->clones->prev_sibling_clone = next_inline_clone;
696 next_inline_clone->next_sibling_clone = clone_of->clones;
697 clone_of->clones = next_inline_clone;
698 }
699
700 /* Merge the clone list. */
701 if (new_clones)
702 {
703 if (!next_inline_clone->clones)
704 next_inline_clone->clones = new_clones;
705 else
706 {
707 n = next_inline_clone->clones;
708 while (n->next_sibling_clone)
709 n = n->next_sibling_clone;
710 n->next_sibling_clone = new_clones;
711 new_clones->prev_sibling_clone = n;
712 }
713 }
714
715 /* Update clone_of pointers. */
716 n = new_clones;
717 while (n)
718 {
719 n->clone_of = next_inline_clone;
720 n = n->next_sibling_clone;
721 }
722 return replacement;
723 }
724 else
725 return NULL;
726 }
727
728 /* Like cgraph_edge::set_call_stmt, but walk the clone tree and update all
729 clones sharing the same function body.
730 When UPDATE_SPECULATIVE is true, all three components of a
731 speculative edge get updated. Otherwise we update only the direct
732 call. */
733
734 void
735 cgraph_node::set_call_stmt_including_clones (gimple old_stmt,
736 gcall *new_stmt,
737 bool update_speculative)
738 {
739 cgraph_node *node;
740 cgraph_edge *edge = get_edge (old_stmt);
741
742 if (edge)
743 edge->set_call_stmt (new_stmt, update_speculative);
744
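/* Walk the clone tree in preorder: descend into node->clones first, then
   visit siblings, and backtrack through clone_of until we are back at
   this node.  */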
745 node = clones;
746 if (node)
747 while (node != this)
748 {
749 cgraph_edge *edge = node->get_edge (old_stmt);
750 if (edge)
751 {
752 edge->set_call_stmt (new_stmt, update_speculative);
753 /* If UPDATE_SPECULATIVE is false, it means that we are turning
754 speculative call into a real code sequence. Update the
755 callgraph edges. */
756 if (edge->speculative && !update_speculative)
757 {
758 cgraph_edge *direct, *indirect;
759 ipa_ref *ref;
760
761 gcc_assert (!edge->indirect_unknown_callee);
762 edge->speculative_call_info (direct, indirect, ref);
763 direct->speculative = false;
764 indirect->speculative = false;
765 ref->speculative = false;
766 }
767 }
768 if (node->clones)
769 node = node->clones;
770 else if (node->next_sibling_clone)
771 node = node->next_sibling_clone;
772 else
773 {
774 while (node != this && !node->next_sibling_clone)
775 node = node->clone_of;
776 if (node != this)
777 node = node->next_sibling_clone;
778 }
779 }
780 }
781
782 /* Like cgraph_node::create_edge, but walk the clone tree and update all clones
783 sharing the same function body. If clones already have an edge for OLD_STMT,
784 only update the edge the same way set_call_stmt_including_clones does.
785
786 TODO: COUNT and FREQ should be properly distributed based on the relative
787 frequencies of the clones. */
788
789 void
790 cgraph_node::create_edge_including_clones (cgraph_node *callee,
791 gimple old_stmt, gcall *stmt,
792 gcov_type count,
793 int freq,
794 cgraph_inline_failed_t reason)
795 {
796 cgraph_node *node;
797 cgraph_edge *edge;
798
799 if (!get_edge (stmt))
800 {
801 edge = create_edge (callee, stmt, count, freq);
802 edge->inline_failed = reason;
803 }
804
805 node = clones;
806 if (node)
807 while (node != this)
808 {
809 cgraph_edge *edge = node->get_edge (old_stmt);
810
811 /* It is possible that the clones already contain the edge while the
812 master didn't. Either we promoted an indirect call into a direct
813 call in the clone, or we are processing clones of an unreachable
814 master whose edges have been removed. */
815 if (edge)
816 edge->set_call_stmt (stmt);
817 else if (! node->get_edge (stmt))
818 {
819 edge = node->create_edge (callee, stmt, count, freq);
820 edge->inline_failed = reason;
821 }
822
823 if (node->clones)
824 node = node->clones;
825 else if (node->next_sibling_clone)
826 node = node->next_sibling_clone;
827 else
828 {
829 while (node != this && !node->next_sibling_clone)
830 node = node->clone_of;
831 if (node != this)
832 node = node->next_sibling_clone;
833 }
834 }
835 }
836
837 /* Remove the node from the cgraph together with all inline clones inlined
838 into it. However, skip removal of FORBIDDEN_NODE and return true if it
839 needs to be removed. This allows the function to be called from an outer
840 loop walking the clone tree. */
841
842 bool
843 cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
844 {
845 cgraph_edge *e, *next;
846 bool found = false;
847
848 if (this == forbidden_node)
849 {
850 callers->remove ();
851 return true;
852 }
853 for (e = callees; e; e = next)
854 {
855 next = e->next_callee;
856 if (!e->inline_failed)
857 found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
858 }
859 remove ();
860 return found;
861 }
862
863 /* The edges representing the callers of the NEW_VERSION node were
864 fixed by cgraph_function_versioning (); now the call statements in their
865 respective bodies should be updated to call NEW_VERSION. */
866
867 static void
868 update_call_expr (cgraph_node *new_version)
869 {
870 cgraph_edge *e;
871
872 gcc_assert (new_version);
873
874 /* Update the call expr on the edges to call the new version. */
875 for (e = new_version->callers; e; e = e->next_caller)
876 {
877 function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
878 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
879 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
880 }
881 }
882
883
884 /* Create a new cgraph node which is a new version of this callgraph
885 node, using NEW_DECL as its declaration. REDIRECT_CALLERS holds the caller
886 edges which should be redirected to point to
887 the new version. All callee edges of this node
888 are cloned to the new version node. Return the new
889 version node.
890
891 If non-NULL, BBS_TO_COPY determines which basic blocks
892 are copied, to prevent duplication of calls that are dead
893 in the clone. */
894
895 cgraph_node *
896 cgraph_node::create_version_clone (tree new_decl,
897 vec<cgraph_edge *> redirect_callers,
898 bitmap bbs_to_copy)
899 {
900 cgraph_node *new_version;
901 cgraph_edge *e;
902 unsigned i;
903
904 new_version = cgraph_node::create (new_decl);
905
906 new_version->analyzed = analyzed;
907 new_version->definition = definition;
908 new_version->local = local;
909 new_version->externally_visible = false;
910 new_version->no_reorder = no_reorder;
911 new_version->local.local = new_version->definition;
912 new_version->global = global;
913 new_version->rtl = rtl;
914 new_version->count = count;
915
916 for (e = callees; e; e = e->next_callee)
917 if (!bbs_to_copy
918 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
919 e->clone (new_version, e->call_stmt,
920 e->lto_stmt_uid, REG_BR_PROB_BASE,
921 CGRAPH_FREQ_BASE,
922 true);
923 for (e = indirect_calls; e; e = e->next_callee)
924 if (!bbs_to_copy
925 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
926 e->clone (new_version, e->call_stmt,
927 e->lto_stmt_uid, REG_BR_PROB_BASE,
928 CGRAPH_FREQ_BASE,
929 true);
930 FOR_EACH_VEC_ELT (redirect_callers, i, e)
931 {
932 /* Redirect calls to the old version node to point to its new
933 version. */
934 e->redirect_callee (new_version);
935 }
936
937 symtab->call_cgraph_duplication_hooks (this, new_version);
938
939 return new_version;
940 }
941
942 /* Perform function versioning.
943 Function versioning includes copying of the tree and
944 a callgraph update (creating a new cgraph node and updating
945 its callees and callers).
946
947 The REDIRECT_CALLERS vector holds the edges to be redirected
948 to the new version.
949
950 TREE_MAP is a mapping of tree nodes we want to replace with
951 new ones (according to the results of prior analysis).
952
953 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
954 from the new version.
955 If SKIP_RETURN is true, the new version will return void.
956 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
957 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
958
959 Return the new version's cgraph node. */
960
961 cgraph_node *
962 cgraph_node::create_version_clone_with_body
963 (vec<cgraph_edge *> redirect_callers,
964 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
965 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
966 const char *clone_name)
967 {
968 tree old_decl = decl;
969 cgraph_node *new_version_node = NULL;
970 tree new_decl;
971
972 if (!tree_versionable_function_p (old_decl))
973 return NULL;
974
975 gcc_assert (local.can_change_signature || !args_to_skip);
976
977 /* Make a new FUNCTION_DECL tree node for the new version. */
978 if (!args_to_skip && !skip_return)
979 new_decl = copy_node (old_decl);
980 else
981 new_decl
982 = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);
983
984 /* Generate a new name for the new version. */
985 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
986 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
987 SET_DECL_RTL (new_decl, NULL);
988
989 /* When the old decl was a constructor or destructor, make sure the clone is not. */
990 DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
991 DECL_STATIC_DESTRUCTOR (new_decl) = 0;
992
993 /* Create the new version's call-graph node
994 and update the edges of the new node. */
995 new_version_node = create_version_clone (new_decl, redirect_callers,
996 bbs_to_copy);
997
998 if (ipa_transforms_to_apply.exists ())
999 new_version_node->ipa_transforms_to_apply
1000 = ipa_transforms_to_apply.copy ();
1001 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1002 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
1003 skip_return, bbs_to_copy, new_entry_block);
1004
1005 /* Update the new version's properties.
1006 Make the new version visible only within this translation unit. Make sure
1007 that it is not weak either.
1008 ??? We cannot use COMDAT linkage because there is no
1009 ABI support for this. */
1010 new_version_node->make_decl_local ();
1011 DECL_VIRTUAL_P (new_version_node->decl) = 0;
1012 new_version_node->externally_visible = 0;
1013 new_version_node->local.local = 1;
1014 new_version_node->lowered = true;
1015 if (!implicit_section)
1016 new_version_node->set_section (get_section ());
1017 /* Clones of global symbols or symbols with unique names are unique. */
1018 if ((TREE_PUBLIC (old_decl)
1019 && !DECL_EXTERNAL (old_decl)
1020 && !DECL_WEAK (old_decl)
1021 && !DECL_COMDAT (old_decl))
1022 || in_lto_p)
1023 new_version_node->unique_name = true;
1024
1025 /* Update the call_expr on the edges to call the new version node. */
1026 update_call_expr (new_version_node);
1027
1028 symtab->call_cgraph_insertion_hooks (this);
1029 return new_version_node;
1030 }
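
/* Illustrative sketch only (kept under "#if 0", not compiled): making a
   materialized copy of NODE that keeps every parameter and basic block but
   returns void.  The wrapper and the "void_copy" suffix are hypothetical.  */
#if 0
static cgraph_node *
example_clone_returning_void (cgraph_node *node)
{
  return node->create_version_clone_with_body (vNULL, NULL,
						/*args_to_skip=*/NULL,
						/*skip_return=*/true,
						/*bbs_to_copy=*/NULL,
						/*new_entry_block=*/NULL,
						"void_copy");
}
#endif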
1031
1032 /* Given a virtual clone, turn it into an actual clone. */
1033
1034 static void
1035 cgraph_materialize_clone (cgraph_node *node)
1036 {
1037 bitmap_obstack_initialize (NULL);
1038 node->former_clone_of = node->clone_of->decl;
1039 if (node->clone_of->former_clone_of)
1040 node->former_clone_of = node->clone_of->former_clone_of;
1041 /* Copy the OLD_VERSION_NODE function tree to the new version. */
1042 tree_function_versioning (node->clone_of->decl, node->decl,
1043 node->clone.tree_map, true,
1044 node->clone.args_to_skip, false,
1045 NULL, NULL);
1046 if (symtab->dump_file)
1047 {
1048 dump_function_to_file (node->clone_of->decl, symtab->dump_file,
1049 dump_flags);
1050 dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
1051 }
1052
1053 /* Function is no longer clone. */
1054 if (node->next_sibling_clone)
1055 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1056 if (node->prev_sibling_clone)
1057 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1058 else
1059 node->clone_of->clones = node->next_sibling_clone;
1060 node->next_sibling_clone = NULL;
1061 node->prev_sibling_clone = NULL;
1062 if (!node->clone_of->analyzed && !node->clone_of->clones)
1063 {
1064 node->clone_of->release_body ();
1065 node->clone_of->remove_callees ();
1066 node->clone_of->remove_all_references ();
1067 }
1068 node->clone_of = NULL;
1069 bitmap_obstack_release (NULL);
1070 }
1071
1072 /* Once all functions from the compilation unit are in memory, produce all
1073 clones and update all calls. We might also do this on demand if we don't
1074 want to bring all functions to memory prior to compilation, but the current
1075 WHOPR implementation does that and it is a bit easier to keep everything
1076 right in this order. */
1077
1078 void
1079 symbol_table::materialize_all_clones (void)
1080 {
1081 cgraph_node *node;
1082 bool stabilized = false;
1083
1084
1085 if (symtab->dump_file)
1086 fprintf (symtab->dump_file, "Materializing clones\n");
1087 #ifdef ENABLE_CHECKING
1088 cgraph_node::verify_cgraph_nodes ();
1089 #endif
1090
1091 /* We could also use topological order, but the number of iterations should be
1092 bounded by the number of IPA passes, since a single IPA pass is probably not
1093 going to create clones of clones it created itself. */
1094 while (!stabilized)
1095 {
1096 stabilized = true;
1097 FOR_EACH_FUNCTION (node)
1098 {
1099 if (node->clone_of && node->decl != node->clone_of->decl
1100 && !gimple_has_body_p (node->decl))
1101 {
1102 if (!node->clone_of->clone_of)
1103 node->clone_of->get_untransformed_body ();
1104 if (gimple_has_body_p (node->clone_of->decl))
1105 {
1106 if (symtab->dump_file)
1107 {
1108 fprintf (symtab->dump_file, "cloning %s to %s\n",
1109 xstrdup_for_dump (node->clone_of->name ()),
1110 xstrdup_for_dump (node->name ()));
1111 if (node->clone.tree_map)
1112 {
1113 unsigned int i;
1114 fprintf (symtab->dump_file, " replace map: ");
1115 for (i = 0;
1116 i < vec_safe_length (node->clone.tree_map);
1117 i++)
1118 {
1119 ipa_replace_map *replace_info;
1120 replace_info = (*node->clone.tree_map)[i];
1121 print_generic_expr (symtab->dump_file, replace_info->old_tree, 0);
1122 fprintf (symtab->dump_file, " -> ");
1123 print_generic_expr (symtab->dump_file, replace_info->new_tree, 0);
1124 fprintf (symtab->dump_file, "%s%s;",
1125 replace_info->replace_p ? "(replace)":"",
1126 replace_info->ref_p ? "(ref)":"");
1127 }
1128 fprintf (symtab->dump_file, "\n");
1129 }
1130 if (node->clone.args_to_skip)
1131 {
1132 fprintf (symtab->dump_file, " args_to_skip: ");
1133 dump_bitmap (symtab->dump_file,
1134 node->clone.args_to_skip);
1135 }
1136 if (node->clone.combined_args_to_skip)
1137 {
1138 fprintf (symtab->dump_file, " combined_args_to_skip:");
1139 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip);
1140 }
1141 }
1142 cgraph_materialize_clone (node);
1143 stabilized = false;
1144 }
1145 }
1146 }
1147 }
1148 FOR_EACH_FUNCTION (node)
1149 if (!node->analyzed && node->callees)
1150 {
1151 node->remove_callees ();
1152 node->remove_all_references ();
1153 }
1154 else
1155 node->clear_stmts_in_references ();
1156 if (symtab->dump_file)
1157 fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
1158 #ifdef ENABLE_CHECKING
1159 cgraph_node::verify_cgraph_nodes ();
1160 #endif
1161 symtab->remove_unreachable_nodes (symtab->dump_file);
1162 }
1163
1164 #include "gt-cgraphclones.h"