Remove cgraph_local_info structure.
[gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within the symbol table) and edges are call sites. */
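
/* An illustrative sketch of how the graph built here is typically consumed:
   an IPA pass iterates over the defined functions and follows the callee
   edge lists hanging off each node.  The iteration macro comes from
   cgraph.h; dump_file stands for whatever dump stream the pass has open.

	cgraph_node *node;
	FOR_EACH_DEFINED_FUNCTION (node)
	  for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	    fprintf (dump_file, "%s calls %s\n",
		     node->dump_name (), e->callee->dump_name ());  */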
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "params.h"
61 #include "context.h"
62 #include "gimplify.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "selftest.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73
74 /* Symbol table global context. */
75 symbol_table *symtab;
76
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
80 void *data;
81 struct cgraph_edge_hook_list *next;
82 };
83
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
87 void *data;
88 struct cgraph_node_hook_list *next;
89 };
90
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
94 void *data;
95 struct cgraph_2edge_hook_list *next;
96 };
97
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
101 void *data;
102 struct cgraph_2node_hook_list *next;
103 };
104
105 /* Hash descriptor for cgraph_function_version_info. */
106
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
112 };
113
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
116 corresponding cgraph_node. */
117
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119
120 /* Hash function for cgraph_fnver_htab. */
121 hashval_t
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124 int uid = ptr->this_node->get_uid ();
125 return (hashval_t)(uid);
126 }
127
128 /* eq function for cgraph_fnver_htab. */
129 bool
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
132 {
133 return n1->this_node->get_uid () == n2->this_node->get_uid ();
134 }
135
136 /* Mark all allocated nodes as GC roots. */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
139
140 /* Return true if NODE's address can be compared. */
141
142 bool
143 symtab_node::address_can_be_compared_p ()
144 {
145 /* Address of virtual tables and functions is never compared. */
146 if (DECL_VIRTUAL_P (decl))
147 return false;
148 /* Address of C++ cdtors is never compared. */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
152 return false;
153 /* Constant pool symbols addresses are never compared.
154 flag_merge_constants permits us to assume the same on readonly vars. */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159 return false;
160 return true;
161 }
162
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
166 {
167 cgraph_function_version_info key;
168 key.this_node = this;
169
170 if (cgraph_fnver_htab == NULL)
171 return NULL;
172
173 return cgraph_fnver_htab->find (&key);
174 }
175
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
180 {
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
184
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189 = version_info_node;
190 return version_info_node;
191 }
192
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
194 static void
195 delete_function_version (cgraph_function_version_info *decl_v)
196 {
197 if (decl_v == NULL)
198 return;
199
200 if (decl_v->prev != NULL)
201 decl_v->prev->next = decl_v->next;
202
203 if (decl_v->next != NULL)
204 decl_v->next->prev = decl_v->prev;
205
206 if (cgraph_fnver_htab != NULL)
207 cgraph_fnver_htab->remove_elt (decl_v);
208 }
209
210 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
211 DECL is a duplicate declaration. */
212 void
213 cgraph_node::delete_function_version_by_decl (tree decl)
214 {
215 cgraph_node *decl_node = cgraph_node::get (decl);
216
217 if (decl_node == NULL)
218 return;
219
220 delete_function_version (decl_node->function_version ());
221
222 decl_node->remove ();
223 }
224
225 /* Record that DECL1 and DECL2 are semantically identical function
226 versions. */
227 void
228 cgraph_node::record_function_versions (tree decl1, tree decl2)
229 {
230 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
231 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
232 cgraph_function_version_info *decl1_v = NULL;
233 cgraph_function_version_info *decl2_v = NULL;
234 cgraph_function_version_info *before;
235 cgraph_function_version_info *after;
236
237 gcc_assert (decl1_node != NULL && decl2_node != NULL);
238 decl1_v = decl1_node->function_version ();
239 decl2_v = decl2_node->function_version ();
240
241 if (decl1_v != NULL && decl2_v != NULL)
242 return;
243
244 if (decl1_v == NULL)
245 decl1_v = decl1_node->insert_new_function_version ();
246
247 if (decl2_v == NULL)
248 decl2_v = decl2_node->insert_new_function_version ();
249
250 /* Chain decl2_v and decl1_v. All semantically identical versions
251 will be chained together. */
252
253 before = decl1_v;
254 after = decl2_v;
255
256 while (before->next != NULL)
257 before = before->next;
258
259 while (after->prev != NULL)
260 after= after->prev;
261
262 before->next = after;
263 after->prev = before;
264 }
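
/* An illustrative sketch: a front end that has identified DECL1 and DECL2
   (placeholder FUNCTION_DECLs) as versions of one function records them and
   can then reach every version through the prev/next chain of the version
   info nodes.

	cgraph_node::record_function_versions (decl1, decl2);
	cgraph_function_version_info *v
	  = cgraph_node::get (decl1)->function_version ();
	for (; v->prev != NULL; v = v->prev)
	  ;
	for (; v != NULL; v = v->next)
	  ... v->this_node is one version ...  */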
265
266 /* Initialize callgraph dump file. */
267
268 void
269 symbol_table::initialize (void)
270 {
271 if (!dump_file)
272 dump_file = dump_begin (TDI_cgraph, NULL);
273
274 if (!ipa_clones_dump_file)
275 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
276 }
277
278 /* Allocate a new callgraph node and insert it into the basic data structures. */
279
280 cgraph_node *
281 symbol_table::create_empty (void)
282 {
283 cgraph_node *node = allocate_cgraph_symbol ();
284
285 node->type = SYMTAB_FUNCTION;
286 node->frequency = NODE_FREQUENCY_NORMAL;
287 node->count_materialization_scale = REG_BR_PROB_BASE;
288 cgraph_count++;
289
290 return node;
291 }
292
293 /* Register HOOK to be called with DATA on each removed edge. */
294 cgraph_edge_hook_list *
295 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
296 {
297 cgraph_edge_hook_list *entry;
298 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
299
300 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
301 entry->hook = hook;
302 entry->data = data;
303 entry->next = NULL;
304 while (*ptr)
305 ptr = &(*ptr)->next;
306 *ptr = entry;
307 return entry;
308 }
309
310 /* Remove ENTRY from the list of hooks called on removing edges. */
311 void
312 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
313 {
314 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
315
316 while (*ptr != entry)
317 ptr = &(*ptr)->next;
318 *ptr = entry->next;
319 free (entry);
320 }
321
322 /* Call all edge removal hooks. */
323 void
324 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
325 {
326 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
327 while (entry)
328 {
329 entry->hook (e, entry->data);
330 entry = entry->next;
331 }
332 }
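
/* An illustrative sketch of the hook API above: a pass that caches per-edge
   data registers a removal hook so stale entries can be dropped, and
   unregisters it on teardown.  The names my_edge_removal_hook and holder are
   placeholders, not existing symbols.

	static void
	my_edge_removal_hook (cgraph_edge *e, void *data)
	{
	  ... forget anything the pass cached about E ...
	}

	cgraph_edge_hook_list *holder
	  = symtab->add_edge_removal_hook (my_edge_removal_hook, NULL);
	...
	symtab->remove_edge_removal_hook (holder);  */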
333
334 /* Register HOOK to be called with DATA on each removed node. */
335 cgraph_node_hook_list *
336 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
337 {
338 cgraph_node_hook_list *entry;
339 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
340
341 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
342 entry->hook = hook;
343 entry->data = data;
344 entry->next = NULL;
345 while (*ptr)
346 ptr = &(*ptr)->next;
347 *ptr = entry;
348 return entry;
349 }
350
351 /* Remove ENTRY from the list of hooks called on removing nodes. */
352 void
353 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
354 {
355 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
356
357 while (*ptr != entry)
358 ptr = &(*ptr)->next;
359 *ptr = entry->next;
360 free (entry);
361 }
362
363 /* Call all node removal hooks. */
364 void
365 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
366 {
367 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
368 while (entry)
369 {
370 entry->hook (node, entry->data);
371 entry = entry->next;
372 }
373 }
374
375 /* Call all node insertion hooks. */
376 void
377 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
378 {
379 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
380 while (entry)
381 {
382 entry->hook (node, entry->data);
383 entry = entry->next;
384 }
385 }
386
387
388 /* Register HOOK to be called with DATA on each inserted node. */
389 cgraph_node_hook_list *
390 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
391 {
392 cgraph_node_hook_list *entry;
393 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
394
395 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
396 entry->hook = hook;
397 entry->data = data;
398 entry->next = NULL;
399 while (*ptr)
400 ptr = &(*ptr)->next;
401 *ptr = entry;
402 return entry;
403 }
404
405 /* Remove ENTRY from the list of hooks called on inserted nodes. */
406 void
407 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
408 {
409 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
410
411 while (*ptr != entry)
412 ptr = &(*ptr)->next;
413 *ptr = entry->next;
414 free (entry);
415 }
416
417 /* Register HOOK to be called with DATA on each duplicated edge. */
418 cgraph_2edge_hook_list *
419 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
420 {
421 cgraph_2edge_hook_list *entry;
422 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
423
424 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
425 entry->hook = hook;
426 entry->data = data;
427 entry->next = NULL;
428 while (*ptr)
429 ptr = &(*ptr)->next;
430 *ptr = entry;
431 return entry;
432 }
433
434 /* Remove ENTRY from the list of hooks called on duplicating edges. */
435 void
436 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
437 {
438 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
439
440 while (*ptr != entry)
441 ptr = &(*ptr)->next;
442 *ptr = entry->next;
443 free (entry);
444 }
445
446 /* Call all edge duplication hooks. */
447 void
448 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
449 {
450 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
451 while (entry)
452 {
453 entry->hook (cs1, cs2, entry->data);
454 entry = entry->next;
455 }
456 }
457
458 /* Register HOOK to be called with DATA on each duplicated node. */
459 cgraph_2node_hook_list *
460 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
461 {
462 cgraph_2node_hook_list *entry;
463 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
464
465 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
466 entry->hook = hook;
467 entry->data = data;
468 entry->next = NULL;
469 while (*ptr)
470 ptr = &(*ptr)->next;
471 *ptr = entry;
472 return entry;
473 }
474
475 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
476 void
477 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
478 {
479 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
480
481 while (*ptr != entry)
482 ptr = &(*ptr)->next;
483 *ptr = entry->next;
484 free (entry);
485 }
486
487 /* Call all node duplication hooks. */
488 void
489 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
490 cgraph_node *node2)
491 {
492 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
493 while (entry)
494 {
495 entry->hook (node, node2, entry->data);
496 entry = entry->next;
497 }
498 }
499
500 /* Return cgraph node assigned to DECL. Create new one when needed. */
501
502 cgraph_node *
503 cgraph_node::create (tree decl)
504 {
505 cgraph_node *node = symtab->create_empty ();
506 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
507
508 node->decl = decl;
509
510 node->count = profile_count::uninitialized ();
511
512 if ((flag_openacc || flag_openmp)
513 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
514 {
515 node->offloadable = 1;
516 if (ENABLE_OFFLOADING)
517 g->have_offload = true;
518 }
519
520 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
521 node->ifunc_resolver = true;
522
523 node->register_symbol ();
524
525 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
526 {
527 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
528 node->next_nested = node->origin->nested;
529 node->origin->nested = node;
530 }
531 return node;
532 }
533
534 /* Try to find a call graph node for declaration DECL and if it does not exist
535 or if it corresponds to an inline clone, create a new one. */
536
537 cgraph_node *
538 cgraph_node::get_create (tree decl)
539 {
540 cgraph_node *first_clone = cgraph_node::get (decl);
541
542 if (first_clone && !first_clone->inlined_to)
543 return first_clone;
544
545 cgraph_node *node = cgraph_node::create (decl);
546 if (first_clone)
547 {
548 first_clone->clone_of = node;
549 node->clones = first_clone;
550 symtab->symtab_prevail_in_asm_name_hash (node);
551 node->decl->decl_with_vis.symtab_node = node;
552 if (dump_file)
553 fprintf (dump_file, "Introduced new external node "
554 "(%s) and turned into root of the clone tree.\n",
555 node->dump_name ());
556 }
557 else if (dump_file)
558 fprintf (dump_file, "Introduced new external node "
559 "(%s).\n", node->dump_name ());
560 return node;
561 }
562
563 /* Mark ALIAS as an alias to TARGET and return the node representing ALIAS.
564 TARGET is either a FUNCTION_DECL or an assembler name (IDENTIFIER_NODE). */
565
566 cgraph_node *
567 cgraph_node::create_alias (tree alias, tree target)
568 {
569 cgraph_node *alias_node;
570
571 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
572 || TREE_CODE (target) == IDENTIFIER_NODE);
573 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
574 alias_node = cgraph_node::get_create (alias);
575 gcc_assert (!alias_node->definition);
576 alias_node->alias_target = target;
577 alias_node->definition = true;
578 alias_node->alias = true;
579 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
580 alias_node->transparent_alias = alias_node->weakref = true;
581 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
582 alias_node->ifunc_resolver = true;
583 return alias_node;
584 }
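
/* An illustrative sketch: for an ALIAS_DECL carrying an alias attribute that
   points at TARGET_DECL (both placeholder FUNCTION_DECLs), the alias is
   registered and, once the target is known, resolved the same way
   create_same_body_alias below does it.

	cgraph_node *n = cgraph_node::create_alias (alias_decl, target_decl);
	n->resolve_alias (cgraph_node::get (target_decl));  */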
585
586 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
587 and NULL otherwise.
588 Same body aliases are output whenever the body of DECL is output,
589 and cgraph_node::get (ALIAS) transparently returns
590 cgraph_node::get (DECL). */
591
592 cgraph_node *
593 cgraph_node::create_same_body_alias (tree alias, tree decl)
594 {
595 cgraph_node *n;
596
597 /* If aliases aren't supported by the assembler, fail. */
598 if (!TARGET_SUPPORTS_ALIASES)
599 return NULL;
600
601 /* Langhooks can create same body aliases of symbols not defined.
602 Those are useless. Drop them on the floor. */
603 if (symtab->global_info_ready)
604 return NULL;
605
606 n = cgraph_node::create_alias (alias, decl);
607 n->cpp_implicit_alias = true;
608 if (symtab->cpp_implicit_aliases_done)
609 n->resolve_alias (cgraph_node::get (decl));
610 return n;
611 }
612
613 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
614 aliases DECL with adjustments made to the first parameter.
615 See comments in struct cgraph_thunk_info for details on the parameters. */
616
617 cgraph_node *
618 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
619 HOST_WIDE_INT fixed_offset,
620 HOST_WIDE_INT virtual_value,
621 HOST_WIDE_INT indirect_offset,
622 tree virtual_offset,
623 tree real_alias)
624 {
625 cgraph_node *node;
626
627 node = cgraph_node::get (alias);
628 if (node)
629 node->reset ();
630 else
631 node = cgraph_node::create (alias);
632
633 /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
634 gcc_checking_assert (virtual_offset
635 ? virtual_value == wi::to_wide (virtual_offset)
636 : virtual_value == 0);
637
638 node->thunk.fixed_offset = fixed_offset;
639 node->thunk.virtual_value = virtual_value;
640 node->thunk.indirect_offset = indirect_offset;
641 node->thunk.alias = real_alias;
642 node->thunk.this_adjusting = this_adjusting;
643 node->thunk.virtual_offset_p = virtual_offset != NULL;
644 node->thunk.thunk_p = true;
645 node->definition = true;
646
647 return node;
648 }
649
650 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
651 Return NULL if there's no such node. */
652
653 cgraph_node *
654 cgraph_node::get_for_asmname (tree asmname)
655 {
656 /* We do not want to look at inline clones. */
657 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
658 node;
659 node = node->next_sharing_asm_name)
660 {
661 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
662 if (cn && !cn->inlined_to)
663 return cn;
664 }
665 return NULL;
666 }
667
668 /* Returns a hash value for X (which really is a cgraph_edge). */
669
670 hashval_t
671 cgraph_edge_hasher::hash (cgraph_edge *e)
672 {
673 /* This is a really poor hash function, but it is what htab_hash_pointer
674 uses. */
675 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
676 }
677
678 /* Returns a hash value for the call statement CALL_STMT. */
679
680 hashval_t
681 cgraph_edge_hasher::hash (gimple *call_stmt)
682 {
683 /* This is a really poor hash function, but it is what htab_hash_pointer
684 uses. */
685 return (hashval_t) ((intptr_t)call_stmt >> 3);
686 }
687
688 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
689
690 inline bool
691 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
692 {
693 return x->call_stmt == y;
694 }
695
696 /* Update call graph edge E in the call site hash of its caller. */
697
698 static inline void
699 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
700 {
701 gimple *call = e->call_stmt;
702 *e->caller->call_site_hash->find_slot_with_hash
703 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
704 }
705
706 /* Add call graph edge E to call site hash of its caller. */
707
708 static inline void
709 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
710 {
711 /* There are two speculative edges for every statement (one direct,
712 one indirect); always hash the direct one. */
713 if (e->speculative && e->indirect_unknown_callee)
714 return;
715 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
716 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
717 if (*slot)
718 {
719 gcc_assert (((cgraph_edge *)*slot)->speculative);
720 if (e->callee)
721 *slot = e;
722 return;
723 }
724 gcc_assert (!*slot || e->speculative);
725 *slot = e;
726 }
727
728 /* Return the callgraph edge representing the GIMPLE_CALL statement
729 CALL_STMT. */
730
731 cgraph_edge *
732 cgraph_node::get_edge (gimple *call_stmt)
733 {
734 cgraph_edge *e, *e2;
735 int n = 0;
736
737 if (call_site_hash)
738 return call_site_hash->find_with_hash
739 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
740
741 /* This loop may turn out to be a performance problem. In that case, adding
742 hash tables to call nodes with very many edges is probably the best
743 solution. It is not a good idea to add a pointer into the CALL_EXPR itself
744 because we want to allow multiple cgraph nodes representing different
745 clones of the same body before the body is actually cloned. */
746 for (e = callees; e; e = e->next_callee)
747 {
748 if (e->call_stmt == call_stmt)
749 break;
750 n++;
751 }
752
753 if (!e)
754 for (e = indirect_calls; e; e = e->next_callee)
755 {
756 if (e->call_stmt == call_stmt)
757 break;
758 n++;
759 }
760
761 if (n > 100)
762 {
763 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
764 for (e2 = callees; e2; e2 = e2->next_callee)
765 cgraph_add_edge_to_call_site_hash (e2);
766 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
767 cgraph_add_edge_to_call_site_hash (e2);
768 }
769
770 return e;
771 }
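
/* An illustrative sketch: while walking the body of the current function,
   the edge for a particular call statement (call_stmt, a placeholder gcall)
   is looked up from its caller node.

	cgraph_node *caller = cgraph_node::get (current_function_decl);
	cgraph_edge *e = caller->get_edge (call_stmt);
	if (e && e->callee)
	  ... e->callee is the known destination ...  */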
772
773
774 /* Change field call_stmt of edge to NEW_STMT.
775 If UPDATE_SPECULATIVE and E is any component of a speculative
776 edge, then update all components. */
777
778 void
779 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
780 {
781 tree decl;
782
783 /* Speculative edges have three components; update all of them
784 when asked to. */
785 if (update_speculative && speculative)
786 {
787 cgraph_edge *direct, *indirect;
788 ipa_ref *ref;
789
790 speculative_call_info (direct, indirect, ref);
791 direct->set_call_stmt (new_stmt, false);
792 indirect->set_call_stmt (new_stmt, false);
793 ref->stmt = new_stmt;
794 return;
795 }
796
797 /* Only direct speculative edges go to call_site_hash. */
798 if (caller->call_site_hash
799 && (!speculative || !indirect_unknown_callee))
800 {
801 caller->call_site_hash->remove_elt_with_hash
802 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
803 }
804
805 cgraph_edge *e = this;
806
807 call_stmt = new_stmt;
808 if (indirect_unknown_callee
809 && (decl = gimple_call_fndecl (new_stmt)))
810 {
811 /* Constant propagation (and possibly also inlining?) can turn an
812 indirect call into a direct one. */
813 cgraph_node *new_callee = cgraph_node::get (decl);
814
815 gcc_checking_assert (new_callee);
816 e = make_direct (new_callee);
817 }
818
819 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
820 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
821 if (e->caller->call_site_hash)
822 cgraph_add_edge_to_call_site_hash (e);
823 }
824
825 /* Allocate a cgraph_edge structure and fill it with data according to the
826 parameters of which only CALLEE can be NULL (when creating an indirect call
827 edge). CLONING_P should be set if properties that are copied from an
828 original edge should not be calculated. */
829
830 cgraph_edge *
831 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
832 gcall *call_stmt, profile_count count,
833 bool indir_unknown_callee, bool cloning_p)
834 {
835 cgraph_edge *edge;
836
837 /* LTO does not actually have access to the call_stmt since these
838 have not been loaded yet. */
839 if (call_stmt)
840 {
841 /* This is a rather expensive check possibly triggering
842 construction of call stmt hashtable. */
843 cgraph_edge *e;
844 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
845 || e->speculative);
846
847 gcc_assert (is_gimple_call (call_stmt));
848 }
849
850 edge = ggc_alloc<cgraph_edge> ();
851 edge->m_summary_id = -1;
852 edges_count++;
853
854 gcc_assert (++edges_max_uid != 0);
855 edge->m_uid = edges_max_uid;
856 edge->aux = NULL;
857 edge->caller = caller;
858 edge->callee = callee;
859 edge->prev_caller = NULL;
860 edge->next_caller = NULL;
861 edge->prev_callee = NULL;
862 edge->next_callee = NULL;
863 edge->lto_stmt_uid = 0;
864
865 edge->count = count;
866 edge->call_stmt = call_stmt;
867 edge->indirect_info = NULL;
868 edge->indirect_inlining_edge = 0;
869 edge->speculative = false;
870 edge->indirect_unknown_callee = indir_unknown_callee;
871 if (call_stmt && caller->call_site_hash)
872 cgraph_add_edge_to_call_site_hash (edge);
873
874 if (cloning_p)
875 return edge;
876
877 edge->can_throw_external
878 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
879 call_stmt) : false;
880 if (call_stmt
881 && callee && callee->decl
882 && !gimple_check_call_matching_types (call_stmt, callee->decl,
883 false))
884 {
885 edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
886 edge->call_stmt_cannot_inline_p = true;
887 }
888 else
889 {
890 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
891 edge->call_stmt_cannot_inline_p = false;
892 }
893
894 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
895 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
896 edge->in_polymorphic_cdtor
897 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
898 caller->decl);
899 else
900 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
901
902 return edge;
903 }
904
905 /* Create edge from a given function to CALLEE in the cgraph. CLONING_P should
906 be set if properties that are copied from an original edge should not be
907 calculated. */
908
909 cgraph_edge *
910 cgraph_node::create_edge (cgraph_node *callee,
911 gcall *call_stmt, profile_count count, bool cloning_p)
912 {
913 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
914 false, cloning_p);
915
916 if (!cloning_p)
917 initialize_inline_failed (edge);
918
919 edge->next_caller = callee->callers;
920 if (callee->callers)
921 callee->callers->prev_caller = edge;
922 edge->next_callee = callees;
923 if (callees)
924 callees->prev_callee = edge;
925 callees = edge;
926 callee->callers = edge;
927
928 return edge;
929 }
930
931 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
932
933 cgraph_indirect_call_info *
934 cgraph_allocate_init_indirect_info (void)
935 {
936 cgraph_indirect_call_info *ii;
937
938 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
939 ii->param_index = -1;
940 return ii;
941 }
942
943 /* Create an indirect edge with a yet-undetermined callee where the call
944 statement destination is a formal parameter of the caller with index
945 PARAM_INDEX. CLONING_P should be set if properties that are copied from an
946 original edge should not be calculated and indirect_info structure should
947 not be calculated. */
948
949 cgraph_edge *
950 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
951 profile_count count,
952 bool cloning_p)
953 {
954 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt, count, true,
955 cloning_p);
956 tree target;
957
958 if (!cloning_p)
959 initialize_inline_failed (edge);
960
961 edge->indirect_info = cgraph_allocate_init_indirect_info ();
962 edge->indirect_info->ecf_flags = ecf_flags;
963 edge->indirect_info->vptr_changed = true;
964
965 /* Record polymorphic call info. */
966 if (!cloning_p
967 && call_stmt
968 && (target = gimple_call_fn (call_stmt))
969 && virtual_method_call_p (target))
970 {
971 ipa_polymorphic_call_context context (decl, target, call_stmt);
972
973 /* Only record types can have virtual calls. */
974 edge->indirect_info->polymorphic = true;
975 edge->indirect_info->param_index = -1;
976 edge->indirect_info->otr_token
977 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
978 edge->indirect_info->otr_type = obj_type_ref_class (target);
979 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
980 edge->indirect_info->context = context;
981 }
982
983 edge->next_callee = indirect_calls;
984 if (indirect_calls)
985 indirect_calls->prev_callee = edge;
986 indirect_calls = edge;
987
988 return edge;
989 }
990
991 /* Remove the edge from the list of the callees of the caller. */
992
993 void
994 cgraph_edge::remove_caller (void)
995 {
996 if (prev_callee)
997 prev_callee->next_callee = next_callee;
998 if (next_callee)
999 next_callee->prev_callee = prev_callee;
1000 if (!prev_callee)
1001 {
1002 if (indirect_unknown_callee)
1003 caller->indirect_calls = next_callee;
1004 else
1005 caller->callees = next_callee;
1006 }
1007 if (caller->call_site_hash)
1008 caller->call_site_hash->remove_elt_with_hash
1009 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1010 }
1011
1012 /* Release edge E and record its summary id for reuse. */
1013
1014 void
1015 symbol_table::free_edge (cgraph_edge *e)
1016 {
1017 edges_count--;
1018 if (e->m_summary_id != -1)
1019 edge_released_summary_ids.safe_push (e->m_summary_id);
1020
1021 if (e->indirect_info)
1022 ggc_free (e->indirect_info);
1023 ggc_free (e);
1024 }
1025
1026 /* Remove the edge in the cgraph. */
1027
1028 void
1029 cgraph_edge::remove (void)
1030 {
1031 /* Call all edge removal hooks. */
1032 symtab->call_edge_removal_hooks (this);
1033
1034 if (!indirect_unknown_callee)
1035 /* Remove from callers list of the callee. */
1036 remove_callee ();
1037
1038 /* Remove from callees list of the callers. */
1039 remove_caller ();
1040
1041 /* Release the edge. */
1042 symtab->free_edge (this);
1043 }
1044
1045 /* Turn the edge into a speculative call to N2. Update
1046 the profile so the direct call is taken DIRECT_COUNT times
1047 and the rest stays on the indirect call.
1048
1049 At clone materialization time, the indirect call E will
1050 be expanded as:
1051
1052 if (call_dest == N2)
1053 n2 ();
1054 else
1055 call call_dest
1056
1057 At this time the function just creates the direct call and
1058 the reference representing the if conditional, and attaches
1059 them all to the original indirect call statement.
1060
1061 Return the direct edge created. */
1062
1063 cgraph_edge *
1064 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count)
1065 {
1066 cgraph_node *n = caller;
1067 ipa_ref *ref = NULL;
1068 cgraph_edge *e2;
1069
1070 if (dump_file)
1071 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1072 n->dump_name (), n2->dump_name ());
1073 speculative = true;
1074 e2 = n->create_edge (n2, call_stmt, direct_count);
1075 initialize_inline_failed (e2);
1076 e2->speculative = true;
1077 if (TREE_NOTHROW (n2->decl))
1078 e2->can_throw_external = false;
1079 else
1080 e2->can_throw_external = can_throw_external;
1081 e2->lto_stmt_uid = lto_stmt_uid;
1082 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1083 count -= e2->count;
1084 symtab->call_edge_duplication_hooks (this, e2);
1085 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1086 ref->lto_stmt_uid = lto_stmt_uid;
1087 ref->speculative = speculative;
1088 n2->mark_address_taken ();
1089 return e2;
1090 }
1091
1092 /* A speculative call consists of three components:
1093 1) an indirect edge representing the original call
1094 2) a direct edge representing the new call
1095 3) an ADDR_EXPR reference representing the speculative check.
1096 All three components are attached to a single statement (the indirect
1097 call) and if one of them exists, all of them must exist.
1098
1099 Given a speculative call edge, return all three
1100 components. */
1101
1102 void
1103 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1104 cgraph_edge *&indirect,
1105 ipa_ref *&reference)
1106 {
1107 ipa_ref *ref;
1108 int i;
1109 cgraph_edge *e2;
1110 cgraph_edge *e = this;
1111
1112 if (!e->indirect_unknown_callee)
1113 for (e2 = e->caller->indirect_calls;
1114 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1115 e2 = e2->next_callee)
1116 ;
1117 else
1118 {
1119 e2 = e;
1120 /* We can take advantage of the call stmt hash. */
1121 if (e2->call_stmt)
1122 {
1123 e = e->caller->get_edge (e2->call_stmt);
1124 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1125 }
1126 else
1127 for (e = e->caller->callees;
1128 e2->call_stmt != e->call_stmt
1129 || e2->lto_stmt_uid != e->lto_stmt_uid;
1130 e = e->next_callee)
1131 ;
1132 }
1133 gcc_assert (e->speculative && e2->speculative);
1134 direct = e;
1135 indirect = e2;
1136
1137 reference = NULL;
1138 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1139 if (ref->speculative
1140 && ((ref->stmt && ref->stmt == e->call_stmt)
1141 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1142 {
1143 reference = ref;
1144 break;
1145 }
1146
1147 /* A speculative edge always consists of all three components - the direct
1148 edge, the indirect edge and the reference. */
1149
1150 gcc_assert (e && e2 && ref);
1151 }
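
/* An illustrative sketch: given any one component E of a speculative call,
   the other two components are recovered exactly the way set_call_stmt
   above does it.

	cgraph_edge *direct, *indirect;
	ipa_ref *ref;
	e->speculative_call_info (direct, indirect, ref);  */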
1152
1153 /* The speculative call edge turned out to be a direct call to CALLEE_DECL.
1154 Remove the speculative call sequence and return the edge representing the
1155 call. It is up to the caller to redirect the call as appropriate. */
1156
1157 cgraph_edge *
1158 cgraph_edge::resolve_speculation (tree callee_decl)
1159 {
1160 cgraph_edge *edge = this;
1161 cgraph_edge *e2;
1162 ipa_ref *ref;
1163
1164 gcc_assert (edge->speculative);
1165 edge->speculative_call_info (e2, edge, ref);
1166 if (!callee_decl
1167 || !ref->referred->semantically_equivalent_p
1168 (symtab_node::get (callee_decl)))
1169 {
1170 if (dump_file)
1171 {
1172 if (callee_decl)
1173 {
1174 fprintf (dump_file, "Speculative indirect call %s => %s has "
1175 "turned out to have contradicting known target ",
1176 edge->caller->dump_name (),
1177 e2->callee->dump_name ());
1178 print_generic_expr (dump_file, callee_decl);
1179 fprintf (dump_file, "\n");
1180 }
1181 else
1182 {
1183 fprintf (dump_file, "Removing speculative call %s => %s\n",
1184 edge->caller->dump_name (),
1185 e2->callee->dump_name ());
1186 }
1187 }
1188 }
1189 else
1190 {
1191 cgraph_edge *tmp = edge;
1192 if (dump_file)
1193 fprintf (dump_file, "Speculative call turned into direct call.\n");
1194 edge = e2;
1195 e2 = tmp;
1196 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1197 in the functions inlined through it. */
1198 }
1199 edge->count += e2->count;
1200 edge->speculative = false;
1201 e2->speculative = false;
1202 ref->remove_reference ();
1203 if (e2->indirect_unknown_callee || e2->inline_failed)
1204 e2->remove ();
1205 else
1206 e2->callee->remove_symbol_and_inline_clones ();
1207 if (edge->caller->call_site_hash)
1208 cgraph_update_edge_in_call_site_hash (edge);
1209 return edge;
1210 }
1211
1212 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1213 CALLEE. Return the resulting edge, which may differ from this one when a
1214 speculative call had to be resolved first. */
1215
1216 cgraph_edge *
1217 cgraph_edge::make_direct (cgraph_node *callee)
1218 {
1219 cgraph_edge *edge = this;
1220 gcc_assert (indirect_unknown_callee);
1221
1222 /* If we are redirecting speculative call, make it non-speculative. */
1223 if (indirect_unknown_callee && speculative)
1224 {
1225 edge = edge->resolve_speculation (callee->decl);
1226
1227 /* On successful speculation just return the pre-existing direct edge. */
1228 if (!edge->indirect_unknown_callee)
1229 return edge;
1230 }
1231
1232 indirect_unknown_callee = 0;
1233 ggc_free (indirect_info);
1234 indirect_info = NULL;
1235
1236 /* Get the edge out of the indirect edge list. */
1237 if (prev_callee)
1238 prev_callee->next_callee = next_callee;
1239 if (next_callee)
1240 next_callee->prev_callee = prev_callee;
1241 if (!prev_callee)
1242 caller->indirect_calls = next_callee;
1243
1244 /* Put it into the normal callee list. */
1245 prev_callee = NULL;
1246 next_callee = caller->callees;
1247 if (caller->callees)
1248 caller->callees->prev_callee = edge;
1249 caller->callees = edge;
1250
1251 /* Insert to callers list of the new callee. */
1252 edge->set_callee (callee);
1253
1254 if (call_stmt
1255 && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1256 {
1257 call_stmt_cannot_inline_p = true;
1258 inline_failed = CIF_MISMATCHED_ARGUMENTS;
1259 }
1260
1261 /* We need to re-determine the inlining status of the edge. */
1262 initialize_inline_failed (edge);
1263 return edge;
1264 }
1265
1266 /* If necessary, change the function declaration in the call statement
1267 associated with E so that it corresponds to the edge callee. */
1268
1269 gimple *
1270 cgraph_edge::redirect_call_stmt_to_callee (void)
1271 {
1272 cgraph_edge *e = this;
1273
1274 tree decl = gimple_call_fndecl (e->call_stmt);
1275 gcall *new_stmt;
1276 gimple_stmt_iterator gsi;
1277
1278 if (e->speculative)
1279 {
1280 cgraph_edge *e2;
1281 gcall *new_stmt;
1282 ipa_ref *ref;
1283
1284 e->speculative_call_info (e, e2, ref);
1285 /* If there already is a direct call (i.e. as a result of the inliner's
1286 substitution), forget about speculating. */
1287 if (decl)
1288 e = e->resolve_speculation (decl);
1289 /* If types do not match, speculation was likely wrong.
1290 The direct edge was possibly redirected to the clone with a different
1291 signature. We did not update the call statement yet, so compare it
1292 with the reference that still points to the proper type. */
1293 else if (!gimple_check_call_matching_types (e->call_stmt,
1294 ref->referred->decl,
1295 true))
1296 {
1297 if (dump_file)
1298 fprintf (dump_file, "Not expanding speculative call of %s -> %s\n"
1299 "Type mismatch.\n",
1300 e->caller->dump_name (),
1301 e->callee->dump_name ());
1302 e = e->resolve_speculation ();
1303 /* We are producing the final function body and will throw away the
1304 callgraph edges really soon. Reset the counts/frequencies to
1305 keep verifier happy in the case of roundoff errors. */
1306 e->count = gimple_bb (e->call_stmt)->count;
1307 }
1308 /* Expand speculation into GIMPLE code. */
1309 else
1310 {
1311 if (dump_file)
1312 {
1313 fprintf (dump_file,
1314 "Expanding speculative call of %s -> %s count: ",
1315 e->caller->dump_name (),
1316 e->callee->dump_name ());
1317 e->count.dump (dump_file);
1318 fprintf (dump_file, "\n");
1319 }
1320 gcc_assert (e2->speculative);
1321 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1322
1323 profile_probability prob = e->count.probability_in (e->count
1324 + e2->count);
1325 if (!prob.initialized_p ())
1326 prob = profile_probability::even ();
1327 new_stmt = gimple_ic (e->call_stmt,
1328 dyn_cast<cgraph_node *> (ref->referred),
1329 prob);
1330 e->speculative = false;
1331 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1332 false);
1333 e->count = gimple_bb (e->call_stmt)->count;
1334 e2->speculative = false;
1335 e2->count = gimple_bb (e2->call_stmt)->count;
1336 ref->speculative = false;
1337 ref->stmt = NULL;
1338 /* The direct and indirect edges are never both in the call site hash;
1339 get it updated. */
1340 if (e->caller->call_site_hash)
1341 cgraph_update_edge_in_call_site_hash (e2);
1342 pop_cfun ();
1343 /* Continue redirecting E to proper target. */
1344 }
1345 }
1346
1347
1348 if (e->indirect_unknown_callee
1349 || decl == e->callee->decl)
1350 return e->call_stmt;
1351
1352 if (flag_checking && decl)
1353 {
1354 cgraph_node *node = cgraph_node::get (decl);
1355 gcc_assert (!node || !node->clone.param_adjustments);
1356 }
1357
1358 if (symtab->dump_file)
1359 {
1360 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1361 e->caller->dump_name (), e->callee->dump_name ());
1362 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1363 if (e->callee->clone.param_adjustments)
1364 e->callee->clone.param_adjustments->dump (symtab->dump_file);
1365 unsigned performed_len
1366 = vec_safe_length (e->caller->clone.performed_splits);
1367 if (performed_len > 0)
1368 fprintf (symtab->dump_file, "Performed splits records:\n");
1369 for (unsigned i = 0; i < performed_len; i++)
1370 {
1371 ipa_param_performed_split *sm
1372 = &(*e->caller->clone.performed_splits)[i];
1373 print_node_brief (symtab->dump_file, " dummy_decl: ", sm->dummy_decl,
1374 TDF_UID);
1375 fprintf (symtab->dump_file, ", unit_offset: %u\n", sm->unit_offset);
1376 }
1377 }
1378
1379 if (ipa_param_adjustments *padjs = e->callee->clone.param_adjustments)
1380 {
1381 /* We need to defer cleaning EH info on the new statement to
1382 fixup-cfg. We may not have dominator information at this point
1383 and thus would end up with unreachable blocks and have no way
1384 to communicate that we need to run CFG cleanup then. */
1385 int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1386 if (lp_nr != 0)
1387 remove_stmt_from_eh_lp (e->call_stmt);
1388
1389 tree old_fntype = gimple_call_fntype (e->call_stmt);
1390 new_stmt = padjs->modify_call (e->call_stmt,
1391 e->caller->clone.performed_splits,
1392 e->callee->decl, false);
1393 cgraph_node *origin = e->callee;
1394 while (origin->clone_of)
1395 origin = origin->clone_of;
1396
1397 if ((origin->former_clone_of
1398 && old_fntype == TREE_TYPE (origin->former_clone_of))
1399 || old_fntype == TREE_TYPE (origin->decl))
1400 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1401 else
1402 {
1403 tree new_fntype = padjs->build_new_function_type (old_fntype, true);
1404 gimple_call_set_fntype (new_stmt, new_fntype);
1405 }
1406
1407 if (lp_nr != 0)
1408 add_stmt_to_eh_lp (new_stmt, lp_nr);
1409 }
1410 else
1411 {
1412 new_stmt = e->call_stmt;
1413 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1414 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1415 }
1416
1417 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1418 adjust gimple_call_fntype too. */
1419 if (gimple_call_noreturn_p (new_stmt)
1420 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1421 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1422 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1423 == void_type_node))
1424 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1425
1426 /* If the call becomes noreturn, remove the LHS if possible. */
1427 tree lhs = gimple_call_lhs (new_stmt);
1428 if (lhs
1429 && gimple_call_noreturn_p (new_stmt)
1430 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1431 || should_remove_lhs_p (lhs)))
1432 {
1433 if (TREE_CODE (lhs) == SSA_NAME)
1434 {
1435 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1436 TREE_TYPE (lhs), NULL);
1437 var = get_or_create_ssa_default_def
1438 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1439 gimple *set_stmt = gimple_build_assign (lhs, var);
1440 gsi = gsi_for_stmt (new_stmt);
1441 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1442 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1443 }
1444 gimple_call_set_lhs (new_stmt, NULL_TREE);
1445 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1446 }
1447
1448 /* If new callee has no static chain, remove it. */
1449 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1450 {
1451 gimple_call_set_chain (new_stmt, NULL);
1452 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1453 }
1454
1455 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1456 new_stmt);
1457
1458 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1459
1460 if (symtab->dump_file)
1461 {
1462 fprintf (symtab->dump_file, " updated to:");
1463 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1464 }
1465 return new_stmt;
1466 }
1467
1468 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1469 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1470 of OLD_STMT if it was previously a call statement.
1471 If NEW_STMT is NULL, the call has been dropped without any
1472 replacement. */
1473
1474 static void
1475 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1476 gimple *old_stmt, tree old_call,
1477 gimple *new_stmt)
1478 {
1479 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1480 ? gimple_call_fndecl (new_stmt) : 0;
1481
1482 /* If we are seeing indirect calls, there is nothing to update. */
1483 if (!new_call && !old_call)
1484 return;
1485 /* See if we turned an indirect call into a direct call or folded a call to
1486 one builtin into a different builtin. */
1487 if (old_call != new_call)
1488 {
1489 cgraph_edge *e = node->get_edge (old_stmt);
1490 cgraph_edge *ne = NULL;
1491 profile_count count;
1492
1493 if (e)
1494 {
1495 /* Keep calls marked as dead dead. */
1496 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1497 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1498 {
1499 node->get_edge (old_stmt)->set_call_stmt
1500 (as_a <gcall *> (new_stmt));
1501 return;
1502 }
1503 /* See if the edge is already there and has the correct callee. It
1504 might be so because indirect inlining has already updated
1505 it. We also might've cloned and redirected the edge. */
1506 if (new_call && e->callee)
1507 {
1508 cgraph_node *callee = e->callee;
1509 while (callee)
1510 {
1511 if (callee->decl == new_call
1512 || callee->former_clone_of == new_call)
1513 {
1514 e->set_call_stmt (as_a <gcall *> (new_stmt));
1515 return;
1516 }
1517 callee = callee->clone_of;
1518 }
1519 }
1520
1521 /* Otherwise remove the edge and create a new one; we can't simply redirect
1522 since the function has changed, so the inline plan and other information
1523 attached to the edge is invalid. */
1524 count = e->count;
1525 if (e->indirect_unknown_callee || e->inline_failed)
1526 e->remove ();
1527 else
1528 e->callee->remove_symbol_and_inline_clones ();
1529 }
1530 else if (new_call)
1531 {
1532 /* We are seeing a new direct call; compute profile info based on the BB. */
1533 basic_block bb = gimple_bb (new_stmt);
1534 count = bb->count;
1535 }
1536
1537 if (new_call)
1538 {
1539 ne = node->create_edge (cgraph_node::get_create (new_call),
1540 as_a <gcall *> (new_stmt), count);
1541 gcc_assert (ne->inline_failed);
1542 }
1543 }
1544 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1545 else if (old_stmt != new_stmt)
1546 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1547 }
1548
1549 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1550 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1551 of OLD_STMT before it was updated (updating can happen in place). */
1552
1553 void
1554 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1555 gimple *new_stmt)
1556 {
1557 cgraph_node *orig = cgraph_node::get (cfun->decl);
1558 cgraph_node *node;
1559
1560 gcc_checking_assert (orig);
1561 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1562 if (orig->clones)
1563 for (node = orig->clones; node != orig;)
1564 {
1565 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1566 if (node->clones)
1567 node = node->clones;
1568 else if (node->next_sibling_clone)
1569 node = node->next_sibling_clone;
1570 else
1571 {
1572 while (node != orig && !node->next_sibling_clone)
1573 node = node->clone_of;
1574 if (node != orig)
1575 node = node->next_sibling_clone;
1576 }
1577 }
1578 }
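
/* An illustrative sketch: a GIMPLE transformation that rewrites a call
   records the original fndecl first and then lets the callgraph catch up.
   old_stmt and new_stmt are placeholders for the statements involved.

	tree old_decl = gimple_call_fndecl (old_stmt);
	... fold or replace the statement, yielding new_stmt ...
	cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);  */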
1579
1580
1581 /* Remove all callees from the node. */
1582
1583 void
1584 cgraph_node::remove_callees (void)
1585 {
1586 cgraph_edge *e, *f;
1587
1588 /* It is sufficient to remove the edges from the lists of callers of
1589 the callees. The callee list of the node can be zapped with one
1590 assignment. */
1591 for (e = callees; e; e = f)
1592 {
1593 f = e->next_callee;
1594 symtab->call_edge_removal_hooks (e);
1595 if (!e->indirect_unknown_callee)
1596 e->remove_callee ();
1597 symtab->free_edge (e);
1598 }
1599 for (e = indirect_calls; e; e = f)
1600 {
1601 f = e->next_callee;
1602 symtab->call_edge_removal_hooks (e);
1603 if (!e->indirect_unknown_callee)
1604 e->remove_callee ();
1605 symtab->free_edge (e);
1606 }
1607 indirect_calls = NULL;
1608 callees = NULL;
1609 if (call_site_hash)
1610 {
1611 call_site_hash->empty ();
1612 call_site_hash = NULL;
1613 }
1614 }
1615
1616 /* Remove all callers from the node. */
1617
1618 void
1619 cgraph_node::remove_callers (void)
1620 {
1621 cgraph_edge *e, *f;
1622
1623 /* It is sufficient to remove the edges from the lists of callees of
1624 the callers. The caller list of the node can be zapped with one
1625 assignment. */
1626 for (e = callers; e; e = f)
1627 {
1628 f = e->next_caller;
1629 symtab->call_edge_removal_hooks (e);
1630 e->remove_caller ();
1631 symtab->free_edge (e);
1632 }
1633 callers = NULL;
1634 }
1635
1636 /* Helper function for cgraph_release_function_body and free_lang_data.
1637 It releases the body of function DECL without having to inspect its
1638 possibly non-existent symtab node. */
1639
1640 void
1641 release_function_body (tree decl)
1642 {
1643 function *fn = DECL_STRUCT_FUNCTION (decl);
1644 if (fn)
1645 {
1646 if (fn->cfg
1647 && loops_for_fn (fn))
1648 {
1649 fn->curr_properties &= ~PROP_loops;
1650 loop_optimizer_finalize (fn);
1651 }
1652 if (fn->gimple_df)
1653 {
1654 delete_tree_ssa (fn);
1655 fn->eh = NULL;
1656 }
1657 if (fn->cfg)
1658 {
1659 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1660 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1661 delete_tree_cfg_annotations (fn);
1662 clear_edges (fn);
1663 fn->cfg = NULL;
1664 }
1665 if (fn->value_histograms)
1666 free_histograms (fn);
1667 gimple_set_body (decl, NULL);
1668 /* The struct function carries a lot of data that would leak if we didn't
1669 remove all pointers to it. */
1670 ggc_free (fn);
1671 DECL_STRUCT_FUNCTION (decl) = NULL;
1672 }
1673 DECL_SAVED_TREE (decl) = NULL;
1674 }
1675
1676 /* Release memory used to represent the body of the function.
1677 Use this only for functions that are released before being translated to
1678 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1679 are free'd in final.c via free_after_compilation().
1680 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1681
1682 void
1683 cgraph_node::release_body (bool keep_arguments)
1684 {
1685 ipa_transforms_to_apply.release ();
1686 if (!used_as_abstract_origin && symtab->state != PARSING)
1687 {
1688 DECL_RESULT (decl) = NULL;
1689
1690 if (!keep_arguments)
1691 DECL_ARGUMENTS (decl) = NULL;
1692 }
1693 /* If the node is abstract and needed, then do not clear
1694 DECL_INITIAL of its associated function declaration because it's
1695 needed to emit debug info later. */
1696 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1697 DECL_INITIAL (decl) = error_mark_node;
1698 release_function_body (decl);
1699 if (lto_file_data)
1700 {
1701 lto_free_function_in_decl_state_for_node (this);
1702 lto_file_data = NULL;
1703 }
1704 }
1705
1706 /* Remove function from symbol table. */
1707
1708 void
1709 cgraph_node::remove (void)
1710 {
1711 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1712 fprintf (symtab->ipa_clones_dump_file,
1713 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1714 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1715 DECL_SOURCE_COLUMN (decl));
1716
1717 symtab->call_cgraph_removal_hooks (this);
1718 remove_callers ();
1719 remove_callees ();
1720 ipa_transforms_to_apply.release ();
1721 delete_function_version (function_version ());
1722
1723 /* Incremental inlining accesses removed nodes stored in the postorder
1724 list. */
1725 force_output = false;
1726 forced_by_abi = false;
1727 cgraph_node *next;
1728 for (cgraph_node *n = nested; n; n = next)
1729 {
1730 next = n->next_nested;
1731 n->origin = NULL;
1732 n->next_nested = NULL;
1733 }
1734 nested = NULL;
1735 if (origin)
1736 {
1737 cgraph_node **node2 = &origin->nested;
1738
1739 while (*node2 != this)
1740 node2 = &(*node2)->next_nested;
1741 *node2 = next_nested;
1742 }
1743 unregister ();
1744 if (prev_sibling_clone)
1745 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1746 else if (clone_of)
1747 clone_of->clones = next_sibling_clone;
1748 if (next_sibling_clone)
1749 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1750 if (clones)
1751 {
1752 cgraph_node *n, *next;
1753
1754 if (clone_of)
1755 {
1756 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1757 n->clone_of = clone_of;
1758 n->clone_of = clone_of;
1759 n->next_sibling_clone = clone_of->clones;
1760 if (clone_of->clones)
1761 clone_of->clones->prev_sibling_clone = n;
1762 clone_of->clones = clones;
1763 }
1764 else
1765 {
1766 /* We are removing a node with clones. This makes the clones inconsistent,
1767 but assume they will be removed subsequently and just keep the clone
1768 tree intact. This can happen in unreachable function removal since
1769 we remove unreachable functions in random order, not by a bottom-up
1770 walk of clone trees. */
1771 for (n = clones; n; n = next)
1772 {
1773 next = n->next_sibling_clone;
1774 n->next_sibling_clone = NULL;
1775 n->prev_sibling_clone = NULL;
1776 n->clone_of = NULL;
1777 }
1778 }
1779 }
1780
1781 /* While all the clones are removed after being processed, the function
1782 itself is kept in the cgraph even after it is compiled. Check whether
1783 we are done with this body and reclaim it proactively if this is the
1784 case. */
1785 if (symtab->state != LTO_STREAMING)
1786 {
1787 cgraph_node *n = cgraph_node::get (decl);
1788 if (!n
1789 || (!n->clones && !n->clone_of && !n->inlined_to
1790 && ((symtab->global_info_ready || in_lto_p)
1791 && (TREE_ASM_WRITTEN (n->decl)
1792 || DECL_EXTERNAL (n->decl)
1793 || !n->analyzed
1794 || (!flag_wpa && n->in_other_partition)))))
1795 release_body ();
1796 }
1797 else
1798 {
1799 lto_free_function_in_decl_state_for_node (this);
1800 lto_file_data = NULL;
1801 }
1802
1803 decl = NULL;
1804 if (call_site_hash)
1805 {
1806 call_site_hash->empty ();
1807 call_site_hash = NULL;
1808 }
1809
1810 symtab->release_symbol (this);
1811 }
1812
1813 /* Indicate that the node is having its address taken. */
1814
1815 void
1816 cgraph_node::mark_address_taken (void)
1817 {
1818 /* Indirect inlining can figure out that all uses of the address are
1819 inlined. */
1820 if (inlined_to)
1821 {
1822 gcc_assert (cfun->after_inlining);
1823 gcc_assert (callers->indirect_inlining_edge);
1824 return;
1825 }
1826 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1827 IPA_REF_ADDR reference exists (and thus it should be set on the node
1828 representing the alias we take the address of) and as a test whether the
1829 address of the object was taken (and thus it should be set on the node the
1830 alias is referring to). We should remove the first use and then remove
1831 the following set. */
1832 address_taken = 1;
1833 cgraph_node *node = ultimate_alias_target ();
1834 node->address_taken = 1;
1835 }
1836
1837 /* Return local info node for the compiled function. */
1838
1839 cgraph_node *
1840 cgraph_node::local_info_node (tree decl)
1841 {
1842 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1843 cgraph_node *node = get (decl);
1844 if (!node)
1845 return NULL;
1846 return node->ultimate_alias_target ();
1847 }
1848
1849 /* Return RTL info for the compiled function. */
1850
1851 cgraph_rtl_info *
1852 cgraph_node::rtl_info (const_tree decl)
1853 {
1854 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1855 cgraph_node *node = get (decl);
1856 if (!node)
1857 return NULL;
1858 enum availability avail;
1859 node = node->ultimate_alias_target (&avail);
1860 if (decl != current_function_decl
1861 && (avail < AVAIL_AVAILABLE
1862 || (node->decl != current_function_decl
1863 && !TREE_ASM_WRITTEN (node->decl))))
1864 return NULL;
1865 /* Allocate if it doesn't exist. */
1866 if (node->rtl == NULL)
1867 {
1868 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1869 SET_HARD_REG_SET (node->rtl->function_used_regs);
1870 }
1871 return node->rtl;
1872 }
1873
1874 /* Return a string describing the failure REASON. */
1875
1876 const char*
1877 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1878 {
1879 #undef DEFCIFCODE
1880 #define DEFCIFCODE(code, type, string) string,
1881
1882 static const char *cif_string_table[CIF_N_REASONS] = {
1883 #include "cif-code.def"
1884 };
1885
1886 /* Signedness of an enum type is implementation defined, so cast it
1887 to unsigned before testing. */
1888 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1889 return cif_string_table[reason];
1890 }
1891
1892 /* Return a type describing the failure REASON. */
1893
1894 cgraph_inline_failed_type_t
1895 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1896 {
1897 #undef DEFCIFCODE
1898 #define DEFCIFCODE(code, type, string) type,
1899
1900 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1901 #include "cif-code.def"
1902 };
1903
1904 /* Signedness of an enum type is implementation defined, so cast it
1905 to unsigned before testing. */
1906 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1907 return cif_type_table[reason];
1908 }
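
/* Illustrative sketch: a dump of why edge E was not inlined typically
   combines the two lookups above (the dump text is arbitrary).

     if (e->inline_failed && dump_file)
       fprintf (dump_file, "%s: %s\n",
                cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR
                ? "error" : "missed",
                cgraph_inline_failed_string (e->inline_failed));  */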
1909
1910 /* Names used to print out the availability enum. */
1911 const char * const cgraph_availability_names[] =
1912 {"unset", "not_available", "overwritable", "available", "local"};
1913
1914 /* Output flags of edge to a file F. */
1915
1916 void
1917 cgraph_edge::dump_edge_flags (FILE *f)
1918 {
1919 if (speculative)
1920 fprintf (f, "(speculative) ");
1921 if (!inline_failed)
1922 fprintf (f, "(inlined) ");
1923 if (call_stmt_cannot_inline_p)
1924 fprintf (f, "(call_stmt_cannot_inline_p) ");
1925 if (indirect_inlining_edge)
1926 fprintf (f, "(indirect_inlining) ");
1927 if (count.initialized_p ())
1928 {
1929 fprintf (f, "(");
1930 count.dump (f);
1931 fprintf (f, ",");
1932 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
1933 }
1934 if (can_throw_external)
1935 fprintf (f, "(can throw external) ");
1936 }
1937
1938 /* Dump call graph node to file F. */
1939
1940 void
1941 cgraph_node::dump (FILE *f)
1942 {
1943 cgraph_edge *edge;
1944
1945 dump_base (f);
1946
1947 if (inlined_to)
1948 fprintf (f, " Function %s is inline copy in %s\n",
1949 dump_name (),
1950 inlined_to->dump_name ());
1951 if (clone_of)
1952 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
1953 if (symtab->function_flags_ready)
1954 fprintf (f, " Availability: %s\n",
1955 cgraph_availability_names [get_availability ()]);
1956
1957 if (profile_id)
1958 fprintf (f, " Profile id: %i\n",
1959 profile_id);
1960 cgraph_function_version_info *vi = function_version ();
1961 if (vi != NULL)
1962 {
1963 fprintf (f, " Version info: ");
1964 if (vi->prev != NULL)
1965 {
1966 fprintf (f, "prev: ");
1967 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
1968 }
1969 if (vi->next != NULL)
1970 {
1971 fprintf (f, "next: ");
1972 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
1973 }
1974 if (vi->dispatcher_resolver != NULL_TREE)
1975 fprintf (f, "dispatcher: %s",
1976 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
1977
1978 fprintf (f, "\n");
1979 }
1980 fprintf (f, " Function flags:");
1981 if (count.initialized_p ())
1982 {
1983 fprintf (f, " count:");
1984 count.dump (f);
1985 }
1986 if (tp_first_run > 0)
1987 fprintf (f, " first_run:%i", tp_first_run);
1988 if (origin)
1989 fprintf (f, " nested in:%s", origin->asm_name ());
1990 if (gimple_has_body_p (decl))
1991 fprintf (f, " body");
1992 if (process)
1993 fprintf (f, " process");
1994 if (local)
1995 fprintf (f, " local");
1996 if (redefined_extern_inline)
1997 fprintf (f, " redefined_extern_inline");
1998 if (only_called_at_startup)
1999 fprintf (f, " only_called_at_startup");
2000 if (only_called_at_exit)
2001 fprintf (f, " only_called_at_exit");
2002 if (tm_clone)
2003 fprintf (f, " tm_clone");
2004 if (calls_comdat_local)
2005 fprintf (f, " calls_comdat_local");
2006 if (icf_merged)
2007 fprintf (f, " icf_merged");
2008 if (merged_comdat)
2009 fprintf (f, " merged_comdat");
2010 if (split_part)
2011 fprintf (f, " split_part");
2012 if (indirect_call_target)
2013 fprintf (f, " indirect_call_target");
2014 if (nonfreeing_fn)
2015 fprintf (f, " nonfreeing_fn");
2016 if (DECL_STATIC_CONSTRUCTOR (decl))
2017 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2018 if (DECL_STATIC_DESTRUCTOR (decl))
2019 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2020 if (frequency == NODE_FREQUENCY_HOT)
2021 fprintf (f, " hot");
2022 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2023 fprintf (f, " unlikely_executed");
2024 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2025 fprintf (f, " executed_once");
2026 if (opt_for_fn (decl, optimize_size))
2027 fprintf (f, " optimize_size");
2028 if (parallelized_function)
2029 fprintf (f, " parallelized_function");
2030 if (DECL_IS_OPERATOR_NEW_P (decl))
2031 fprintf (f, " operator_new");
2032 if (DECL_IS_OPERATOR_DELETE_P (decl))
2033 fprintf (f, " operator_delete");
2034
2035
2036 fprintf (f, "\n");
2037
2038 if (thunk.thunk_p)
2039 {
2040 fprintf (f, " Thunk");
2041 if (thunk.alias)
2042 fprintf (f, " of %s (asm:%s)",
2043 lang_hooks.decl_printable_name (thunk.alias, 2),
2044 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2045 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2046 "has virtual offset %i\n",
2047 (int)thunk.fixed_offset,
2048 (int)thunk.virtual_value,
2049 (int)thunk.indirect_offset,
2050 (int)thunk.virtual_offset_p);
2051 }
2052 else if (former_thunk_p ())
2053 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2054 "indirect_offset %i has virtual offset %i\n",
2055 (int)thunk.fixed_offset,
2056 (int)thunk.virtual_value,
2057 (int)thunk.indirect_offset,
2058 (int)thunk.virtual_offset_p);
2059 if (alias && thunk.alias
2060 && DECL_P (thunk.alias))
2061 {
2062 fprintf (f, " Alias of %s",
2063 lang_hooks.decl_printable_name (thunk.alias, 2));
2064 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2065 fprintf (f, " (asm:%s)",
2066 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2067 fprintf (f, "\n");
2068 }
2069
2070 fprintf (f, " Called by: ");
2071
2072 profile_count sum = profile_count::zero ();
2073 for (edge = callers; edge; edge = edge->next_caller)
2074 {
2075 fprintf (f, "%s ", edge->caller->dump_name ());
2076 edge->dump_edge_flags (f);
2077 if (edge->count.initialized_p ())
2078 sum += edge->count.ipa ();
2079 }
2080
2081 fprintf (f, "\n Calls: ");
2082 for (edge = callees; edge; edge = edge->next_callee)
2083 {
2084 fprintf (f, "%s ", edge->callee->dump_name ());
2085 edge->dump_edge_flags (f);
2086 }
2087 fprintf (f, "\n");
2088
2089 if (count.ipa ().initialized_p ())
2090 {
2091 bool ok = true;
2092 bool min = false;
2093 ipa_ref *ref;
2094
2095 FOR_EACH_ALIAS (this, ref)
2096 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2097 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2098
2099 if (inlined_to
2100 || (symtab->state < EXPANSION
2101 && ultimate_alias_target () == this && only_called_directly_p ()))
2102 ok = !count.ipa ().differs_from_p (sum);
2103 else if (count.ipa () > profile_count::from_gcov_type (100)
2104 && count.ipa () < sum.apply_scale (99, 100))
2105 ok = false, min = true;
2106 if (!ok)
2107 {
2108 fprintf (f, " Invalid sum of caller counts ");
2109 sum.dump (f);
2110 if (min)
2111 fprintf (f, ", should be at most ");
2112 else
2113 fprintf (f, ", should be ");
2114 count.ipa ().dump (f);
2115 fprintf (f, "\n");
2116 }
2117 }
2118
2119 for (edge = indirect_calls; edge; edge = edge->next_callee)
2120 {
2121 if (edge->indirect_info->polymorphic)
2122 {
2123 fprintf (f, " Polymorphic indirect call of type ");
2124 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2125 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2126 }
2127 else
2128 fprintf (f, " Indirect call");
2129 edge->dump_edge_flags (f);
2130 if (edge->indirect_info->param_index != -1)
2131 {
2132 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2133 if (edge->indirect_info->agg_contents)
2134 fprintf (f, " loaded from %s %s at offset %i",
2135 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2136 edge->indirect_info->by_ref ? "passed by reference":"",
2137 (int)edge->indirect_info->offset);
2138 if (edge->indirect_info->vptr_changed)
2139 fprintf (f, " (vptr maybe changed)");
2140 }
2141 fprintf (f, "\n");
2142 if (edge->indirect_info->polymorphic)
2143 edge->indirect_info->context.dump (f);
2144 }
2145 }
2146
2147 /* Dump call graph node to file F in graphviz format. */
2148
2149 void
2150 cgraph_node::dump_graphviz (FILE *f)
2151 {
2152 cgraph_edge *edge;
2153
2154 for (edge = callees; edge; edge = edge->next_callee)
2155 {
2156 cgraph_node *callee = edge->callee;
2157
2158 fprintf (f, "\t\"%s\" -> \"%s\"\n", dump_name (), callee->dump_name ());
2159 }
2160 }
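
/* dump_graphviz emits only the out-edges of a single node; a complete
   graph can be produced by wrapping the per-node output in a digraph,
   e.g. (illustrative sketch):

     fprintf (f, "digraph callgraph {\n");
     FOR_EACH_FUNCTION (node)
       node->dump_graphviz (f);
     fprintf (f, "}\n");  */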
2161
2162
2163 /* Dump call graph node NODE to stderr. */
2164
2165 DEBUG_FUNCTION void
2166 cgraph_node::debug (void)
2167 {
2168 dump (stderr);
2169 }
2170
2171 /* Dump the callgraph to file F. */
2172
2173 void
2174 cgraph_node::dump_cgraph (FILE *f)
2175 {
2176 cgraph_node *node;
2177
2178 fprintf (f, "callgraph:\n\n");
2179 FOR_EACH_FUNCTION (node)
2180 node->dump (f);
2181 }
2182
2183 /* Return true when the DECL can possibly be inlined. */
2184
2185 bool
2186 cgraph_function_possibly_inlined_p (tree decl)
2187 {
2188 if (!symtab->global_info_ready)
2189 return !DECL_UNINLINABLE (decl);
2190 return DECL_POSSIBLY_INLINED (decl);
2191 }
2192
2193 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
2194 void
2195 cgraph_node::unnest (void)
2196 {
2197 gcc_assert (origin);
2198 cgraph_node **node2 = &origin->nested;
2199
2200 while (*node2 != this)
2201 node2 = &(*node2)->next_nested;
2202 *node2 = next_nested;
2203 origin = NULL;
2204 }
2205
2206 /* Return function availability. See cgraph.h for description of individual
2207 return values. */
2208 enum availability
2209 cgraph_node::get_availability (symtab_node *ref)
2210 {
2211 if (ref)
2212 {
2213 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2214 if (cref)
2215 ref = cref->inlined_to;
2216 }
2217 enum availability avail;
2218 if (!analyzed)
2219 avail = AVAIL_NOT_AVAILABLE;
2220 else if (local)
2221 avail = AVAIL_LOCAL;
2222 else if (inlined_to)
2223 avail = AVAIL_AVAILABLE;
2224 else if (transparent_alias)
2225 ultimate_alias_target (&avail, ref);
2226 else if (ifunc_resolver
2227 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2228 avail = AVAIL_INTERPOSABLE;
2229 else if (!externally_visible)
2230 avail = AVAIL_AVAILABLE;
2231 /* If this is a reference from symbol itself and there are no aliases, we
2232 may be sure that the symbol was not interposed by something else because
2233 the symbol itself would be unreachable otherwise.
2234
2235 Also comdat groups are always resolved in groups. */
2236 else if ((this == ref && !has_aliases_p ())
2237 || (ref && get_comdat_group ()
2238 && get_comdat_group () == ref->get_comdat_group ()))
2239 avail = AVAIL_AVAILABLE;
2240 /* Inline functions are safe to be analyzed even if their symbol can
2241 be overwritten at runtime. It is not meaningful to enforce any sane
2242 behavior on replacing an inline function by a different body. */
2243 else if (DECL_DECLARED_INLINE_P (decl))
2244 avail = AVAIL_AVAILABLE;
2245
2246 /* If the function can be overwritten, return AVAIL_INTERPOSABLE. Take
2247 care of at least two notable extensions - the COMDAT functions
2248 used to share template instantiations in C++ (this is symmetric
2249 to code cp_cannot_inline_tree_fn and probably should be shared, and
2250 the inlinability hooks completely eliminated). */
2251
2252 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2253 avail = AVAIL_INTERPOSABLE;
2254 else avail = AVAIL_AVAILABLE;
2255
2256 return avail;
2257 }
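
/* Typical use of the availability lattice (illustrative sketch; node is
   the symbol being queried and analyze_body is a hypothetical analysis
   routine): only trust the body of a target that cannot be interposed
   at link time.

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       analyze_body (target);  */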
2258
2259 /* Worker for cgraph_node_can_be_local_p. */
2260 static bool
2261 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2262 {
2263 return !(!node->force_output
2264 && ((DECL_COMDAT (node->decl)
2265 && !node->forced_by_abi
2266 && !node->used_from_object_file_p ()
2267 && !node->same_comdat_group)
2268 || !node->externally_visible));
2269 }
2270
2271 /* Return true if cgraph_node can be made local for API change.
2272 Extern inline functions and C++ COMDAT functions can be made local
2273 at the expense of possible code size growth if the function is used in multiple
2274 compilation units. */
2275 bool
2276 cgraph_node::can_be_local_p (void)
2277 {
2278 return (!address_taken
2279 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2280 NULL, true));
2281 }
2282
2283 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2284 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2285 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2286 skipped. */
2287 bool
2288 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2289 (cgraph_node *, void *),
2290 void *data,
2291 bool include_overwritable,
2292 bool exclude_virtual_thunks)
2293 {
2294 cgraph_edge *e;
2295 ipa_ref *ref;
2296 enum availability avail = AVAIL_AVAILABLE;
2297
2298 if (include_overwritable
2299 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2300 {
2301 if (callback (this, data))
2302 return true;
2303 }
2304 FOR_EACH_ALIAS (this, ref)
2305 {
2306 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2307 if (include_overwritable
2308 || alias->get_availability () > AVAIL_INTERPOSABLE)
2309 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2310 include_overwritable,
2311 exclude_virtual_thunks))
2312 return true;
2313 }
2314 if (avail <= AVAIL_INTERPOSABLE)
2315 return false;
2316 for (e = callers; e; e = e->next_caller)
2317 if (e->caller->thunk.thunk_p
2318 && (include_overwritable
2319 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2320 && !(exclude_virtual_thunks
2321 && e->caller->thunk.virtual_offset_p))
2322 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2323 include_overwritable,
2324 exclude_virtual_thunks))
2325 return true;
2326
2327 return false;
2328 }
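
/* Illustrative sketch of a callback for the walker above (count_variant
   and the counter are hypothetical). Returning false keeps the walk
   going; returning true stops it early.

     static bool
     count_variant (cgraph_node *, void *data)
     {
       ++*(int *) data;
       return false;
     }

     int n = 0;
     node->call_for_symbol_thunks_and_aliases (count_variant, &n, true);  */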
2329
2330 /* Worker to make NODE local. */
2331
2332 bool
2333 cgraph_node::make_local (cgraph_node *node, void *)
2334 {
2335 gcc_checking_assert (node->can_be_local_p ());
2336 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2337 {
2338 node->make_decl_local ();
2339 node->set_section (NULL);
2340 node->set_comdat_group (NULL);
2341 node->externally_visible = false;
2342 node->forced_by_abi = false;
2343 node->local = true;
2344 node->set_section (NULL);
2345 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2346 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2347 && !flag_incremental_link);
2348 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2349 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2350 }
2351 return false;
2352 }
2353
2354 /* Make cgraph node local. */
2355
2356 void
2357 cgraph_node::make_local (void)
2358 {
2359 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2360 }
2361
2362 /* Worker to set nothrow flag. */
2363
2364 static void
2365 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2366 bool *changed)
2367 {
2368 cgraph_edge *e;
2369
2370 if (nothrow && !TREE_NOTHROW (node->decl))
2371 {
2372 /* With non-call exceptions we can't say for sure that another function's
2373 body was not optimized in a way that still throws. */
2374 if (!non_call || node->binds_to_current_def_p ())
2375 {
2376 TREE_NOTHROW (node->decl) = true;
2377 *changed = true;
2378 for (e = node->callers; e; e = e->next_caller)
2379 e->can_throw_external = false;
2380 }
2381 }
2382 else if (!nothrow && TREE_NOTHROW (node->decl))
2383 {
2384 TREE_NOTHROW (node->decl) = false;
2385 *changed = true;
2386 }
2387 ipa_ref *ref;
2388 FOR_EACH_ALIAS (node, ref)
2389 {
2390 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2391 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2392 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2393 }
2394 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2395 if (e->caller->thunk.thunk_p
2396 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2397 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2398 }
2399
2400 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
2401 if any to NOTHROW. Return true if any change was done. */
2402
2403 bool
2404 cgraph_node::set_nothrow_flag (bool nothrow)
2405 {
2406 bool changed = false;
2407 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2408
2409 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2410 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2411 else
2412 {
2413 ipa_ref *ref;
2414
2415 FOR_EACH_ALIAS (this, ref)
2416 {
2417 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2418 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2419 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2420 }
2421 }
2422 return changed;
2423 }
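
/* Illustrative usage sketch (the dump text is arbitrary): an IPA pass
   that proved this function cannot throw would do

     if (node->set_nothrow_flag (true) && dump_file)
       fprintf (dump_file, "Function found to be nothrow: %s\n",
                node->dump_name ());

   similar to what ipa-pure-const does with its local analysis results.  */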
2424
2425 /* Worker to set malloc flag. */
2426 static void
2427 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2428 {
2429 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2430 {
2431 DECL_IS_MALLOC (node->decl) = true;
2432 *changed = true;
2433 }
2434
2435 ipa_ref *ref;
2436 FOR_EACH_ALIAS (node, ref)
2437 {
2438 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2439 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2440 set_malloc_flag_1 (alias, malloc_p, changed);
2441 }
2442
2443 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2444 if (e->caller->thunk.thunk_p
2445 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2446 set_malloc_flag_1 (e->caller, malloc_p, changed);
2447 }
2448
2449 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2450
2451 bool
2452 cgraph_node::set_malloc_flag (bool malloc_p)
2453 {
2454 bool changed = false;
2455
2456 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2457 set_malloc_flag_1 (this, malloc_p, &changed);
2458 else
2459 {
2460 ipa_ref *ref;
2461
2462 FOR_EACH_ALIAS (this, ref)
2463 {
2464 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2465 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2466 set_malloc_flag_1 (alias, malloc_p, &changed);
2467 }
2468 }
2469 return changed;
2470 }
2471
2472 /* Worker to set_const_flag. */
2473
2474 static void
2475 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2476 bool *changed)
2477 {
2478 /* Static constructors and destructors without a side effect can be
2479 optimized out. */
2480 if (set_const && !looping)
2481 {
2482 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2483 {
2484 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2485 *changed = true;
2486 }
2487 if (DECL_STATIC_DESTRUCTOR (node->decl))
2488 {
2489 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2490 *changed = true;
2491 }
2492 }
2493 if (!set_const)
2494 {
2495 if (TREE_READONLY (node->decl))
2496 {
2497 TREE_READONLY (node->decl) = 0;
2498 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2499 *changed = true;
2500 }
2501 }
2502 else
2503 {
2504 /* Consider function:
2505
2506 bool a(int *p)
2507 {
2508 return *p==*p;
2509 }
2510
2511 During early optimization we will turn this into:
2512
2513 bool a(int *p)
2514 {
2515 return true;
2516 }
2517
2518 Now this function can be detected as CONST; however, when interposed
2519 it may end up being just PURE. We must always assume the worst
2520 scenario here. */
2521 if (TREE_READONLY (node->decl))
2522 {
2523 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2524 {
2525 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2526 *changed = true;
2527 }
2528 }
2529 else if (node->binds_to_current_def_p ())
2530 {
2531 TREE_READONLY (node->decl) = true;
2532 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2533 DECL_PURE_P (node->decl) = false;
2534 *changed = true;
2535 }
2536 else
2537 {
2538 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, "Dropping state to PURE because function does "
2540 "not bind to current def.\n");
2541 if (!DECL_PURE_P (node->decl))
2542 {
2543 DECL_PURE_P (node->decl) = true;
2544 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2545 *changed = true;
2546 }
2547 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2548 {
2549 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2550 *changed = true;
2551 }
2552 }
2553 }
2554
2555 ipa_ref *ref;
2556 FOR_EACH_ALIAS (node, ref)
2557 {
2558 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2559 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2560 set_const_flag_1 (alias, set_const, looping, changed);
2561 }
2562 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2563 if (e->caller->thunk.thunk_p
2564 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2565 {
2566 /* Virtual thunks access virtual offset in the vtable, so they can
2567 only be pure, never const. */
2568 if (set_const
2569 && (e->caller->thunk.virtual_offset_p
2570 || !node->binds_to_current_def_p (e->caller)))
2571 *changed |= e->caller->set_pure_flag (true, looping);
2572 else
2573 set_const_flag_1 (e->caller, set_const, looping, changed);
2574 }
2575 }
2576
2577 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2578 If SET_CONST is false, clear the flag.
2579
2580 When setting the flag be careful about possible interposition: do not
2581 set the flag for functions that can be interposed, and only set the
2582 pure flag for functions that may bind to a different definition.
2583
2584 Return true if any change was done. */
2585
2586 bool
2587 cgraph_node::set_const_flag (bool set_const, bool looping)
2588 {
2589 bool changed = false;
2590 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2591 set_const_flag_1 (this, set_const, looping, &changed);
2592 else
2593 {
2594 ipa_ref *ref;
2595
2596 FOR_EACH_ALIAS (this, ref)
2597 {
2598 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2599 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2600 set_const_flag_1 (alias, set_const, looping, &changed);
2601 }
2602 }
2603 return changed;
2604 }
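
/* Usage sketch (fn_node stands for the function's cgraph node): a caller
   that proved the function is const and free of infinite loops passes
   LOOPING as false,

     bool changed = fn_node->set_const_flag (true, false);

   while a caller that could not rule out looping passes LOOPING as true.
   As described above, interposition may silently degrade the result from
   CONST to PURE.  */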
2605
2606 /* Info used by set_pure_flag_1. */
2607
2608 struct set_pure_flag_info
2609 {
2610 bool pure;
2611 bool looping;
2612 bool changed;
2613 };
2614
2615 /* Worker to set_pure_flag. */
2616
2617 static bool
2618 set_pure_flag_1 (cgraph_node *node, void *data)
2619 {
2620 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2621 /* Static constructors and destructors without a side effect can be
2622 optimized out. */
2623 if (info->pure && !info->looping)
2624 {
2625 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2626 {
2627 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2628 info->changed = true;
2629 }
2630 if (DECL_STATIC_DESTRUCTOR (node->decl))
2631 {
2632 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2633 info->changed = true;
2634 }
2635 }
2636 if (info->pure)
2637 {
2638 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2639 {
2640 DECL_PURE_P (node->decl) = true;
2641 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2642 info->changed = true;
2643 }
2644 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2645 && !info->looping)
2646 {
2647 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2648 info->changed = true;
2649 }
2650 }
2651 else
2652 {
2653 if (DECL_PURE_P (node->decl))
2654 {
2655 DECL_PURE_P (node->decl) = false;
2656 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2657 info->changed = true;
2658 }
2659 }
2660 return false;
2661 }
2662
2663 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2664 if any to PURE.
2665
2666 When setting the flag, be careful about possible interposition.
2667 Return true if any change was done. */
2668
2669 bool
2670 cgraph_node::set_pure_flag (bool pure, bool looping)
2671 {
2672 struct set_pure_flag_info info = {pure, looping, false};
2673 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2674 return info.changed;
2675 }
2676
2677 /* Return true when cgraph_node cannot return or throw and thus
2678 it is safe to ignore its side effects for IPA analysis. */
2679
2680 bool
2681 cgraph_node::cannot_return_p (void)
2682 {
2683 int flags = flags_from_decl_or_type (decl);
2684 if (!opt_for_fn (decl, flag_exceptions))
2685 return (flags & ECF_NORETURN) != 0;
2686 else
2687 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2688 == (ECF_NORETURN | ECF_NOTHROW));
2689 }
2690
2691 /* Return true when a call along this edge cannot lead to a return from the
2692 caller and thus it is safe to ignore its side effects for IPA analysis
2693 when computing side effects of the caller.
2694 FIXME: We could actually mark all edges that have no reaching
2695 path to the exit block or throw to get better results. */
2696 bool
2697 cgraph_edge::cannot_lead_to_return_p (void)
2698 {
2699 if (caller->cannot_return_p ())
2700 return true;
2701 if (indirect_unknown_callee)
2702 {
2703 int flags = indirect_info->ecf_flags;
2704 if (!opt_for_fn (caller->decl, flag_exceptions))
2705 return (flags & ECF_NORETURN) != 0;
2706 else
2707 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2708 == (ECF_NORETURN | ECF_NOTHROW));
2709 }
2710 else
2711 return callee->cannot_return_p ();
2712 }
2713
2714 /* Return true if the edge may be considered hot. */
2715
2716 bool
2717 cgraph_edge::maybe_hot_p (void)
2718 {
2719 if (!maybe_hot_count_p (NULL, count.ipa ()))
2720 return false;
2721 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2722 || (callee
2723 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2724 return false;
2725 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2726 && (callee
2727 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2728 return false;
2729 if (opt_for_fn (caller->decl, optimize_size))
2730 return false;
2731 if (caller->frequency == NODE_FREQUENCY_HOT)
2732 return true;
2733 /* If the profile is not known yet, be conservative.
2734 FIXME: this predicate is used by the early inliner and could do better there. */
2735 if (symtab->state < IPA_SSA)
2736 return true;
2737 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2738 && sreal_frequency () * 2 < 3)
2739 return false;
2740 if (sreal_frequency () * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) <= 1)
2741 return false;
2742 return true;
2743 }
2744
2745 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2746
2747 static bool
2748 nonremovable_p (cgraph_node *node, void *)
2749 {
2750 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2751 }
2752
2753 /* Return true if the whole comdat group can be removed if there are no direct
2754 calls to THIS. */
2755
2756 bool
2757 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2758 {
2759 struct ipa_ref *ref;
2760
2761 /* For local symbols or non-comdat group it is the same as
2762 can_remove_if_no_direct_calls_p. */
2763 if (!externally_visible || !same_comdat_group)
2764 {
2765 if (DECL_EXTERNAL (decl))
2766 return true;
2767 if (address_taken)
2768 return false;
2769 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2770 }
2771
2772 if (will_inline && address_taken)
2773 return false;
2774
2775 /* Otherwise check if we can remove the symbol itself and then verify
2776 that the only uses of the comdat group are direct calls to THIS
2777 or its aliases. */
2778 if (!can_remove_if_no_direct_calls_and_refs_p ())
2779 return false;
2780
2781 /* Check that all refs come from within the comdat group. */
2782 for (int i = 0; iterate_referring (i, ref); i++)
2783 if (ref->referring->get_comdat_group () != get_comdat_group ())
2784 return false;
2785
2786 struct cgraph_node *target = ultimate_alias_target ();
2787 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2788 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2789 {
2790 if (!externally_visible)
2791 continue;
2792 if (!next->alias
2793 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2794 return false;
2795
2796 /* If we see a different symbol than THIS, be sure to check calls. */
2797 if (next->ultimate_alias_target () != target)
2798 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2799 if (e->caller->get_comdat_group () != get_comdat_group ()
2800 || will_inline)
2801 return false;
2802
2803 /* If function is not being inlined, we care only about
2804 references outside of the comdat group. */
2805 if (!will_inline)
2806 for (int i = 0; next->iterate_referring (i, ref); i++)
2807 if (ref->referring->get_comdat_group () != get_comdat_group ())
2808 return false;
2809 }
2810 return true;
2811 }
2812
2813 /* Return true when function cgraph_node can be expected to be removed
2814 from the program when direct calls in this compilation unit are removed.
2815
2816 As a special case COMDAT functions are
2817 cgraph_can_remove_if_no_direct_calls_p while they are not
2818 cgraph_only_called_directly_p (it is possible they are called from another
2819 unit).
2820
2821 This function behaves as cgraph_only_called_directly_p because eliminating
2822 all uses of a COMDAT function does not make it necessarily disappear from
2823 the program unless we are compiling the whole program or we do LTO. In this
2824 case we know we win since dynamic linking will not really discard the
2825 linkonce section. */
2826
2827 bool
2828 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2829 (bool will_inline)
2830 {
2831 gcc_assert (!inlined_to);
2832 if (DECL_EXTERNAL (decl))
2833 return true;
2834
2835 if (!in_lto_p && !flag_whole_program)
2836 {
2837 /* If the symbol is in a comdat group, we need to verify that the whole comdat
2838 group becomes unreachable. Technically we could skip references from
2839 within the group, too. */
2840 if (!only_called_directly_p ())
2841 return false;
2842 if (same_comdat_group && externally_visible)
2843 {
2844 struct cgraph_node *target = ultimate_alias_target ();
2845
2846 if (will_inline && address_taken)
2847 return true;
2848 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2849 next != this;
2850 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2851 {
2852 if (!externally_visible)
2853 continue;
2854 if (!next->alias
2855 && !next->only_called_directly_p ())
2856 return false;
2857
2858 /* If we see a different symbol than THIS,
2859 be sure to check calls. */
2860 if (next->ultimate_alias_target () != target)
2861 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2862 if (e->caller->get_comdat_group () != get_comdat_group ()
2863 || will_inline)
2864 return false;
2865 }
2866 }
2867 return true;
2868 }
2869 else
2870 return can_remove_if_no_direct_calls_p (will_inline);
2871 }
2872
2873
2874 /* Worker for cgraph_only_called_directly_p. */
2875
2876 static bool
2877 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2878 {
2879 return !node->only_called_directly_or_aliased_p ();
2880 }
2881
2882 /* Return true when function cgraph_node and all its aliases are only called
2883 directly,
2884 i.e. it is not externally visible, its address was not taken and
2885 it is not used in any other non-standard way. */
2886
2887 bool
2888 cgraph_node::only_called_directly_p (void)
2889 {
2890 gcc_assert (ultimate_alias_target () == this);
2891 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2892 NULL, true);
2893 }
2894
2895
2896 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2897
2898 static bool
2899 collect_callers_of_node_1 (cgraph_node *node, void *data)
2900 {
2901 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2902 cgraph_edge *cs;
2903 enum availability avail;
2904 node->ultimate_alias_target (&avail);
2905
2906 if (avail > AVAIL_INTERPOSABLE)
2907 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2908 if (!cs->indirect_inlining_edge
2909 && !cs->caller->thunk.thunk_p)
2910 redirect_callers->safe_push (cs);
2911 return false;
2912 }
2913
2914 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2915 cgraph_node (i.e. are not overwritable). */
2916
2917 vec<cgraph_edge *>
2918 cgraph_node::collect_callers (void)
2919 {
2920 vec<cgraph_edge *> redirect_callers = vNULL;
2921 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2922 &redirect_callers, false);
2923 return redirect_callers;
2924 }
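
/* Illustrative sketch: redirecting every caller collected above to a new
   node (new_node is hypothetical; redirect_callee is a cgraph_edge member
   declared in cgraph.h).

     vec<cgraph_edge *> callers = node->collect_callers ();
     for (unsigned i = 0; i < callers.length (); i++)
       callers[i]->redirect_callee (new_node);
     callers.release ();  */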
2925
2926
2927 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. Return
2928 optimistically true if this cannot be determined. */
2929
2930 static bool
2931 clone_of_p (cgraph_node *node, cgraph_node *node2)
2932 {
2933 node = node->ultimate_alias_target ();
2934 node2 = node2->ultimate_alias_target ();
2935
2936 if (node2->clone_of == node
2937 || node2->former_clone_of == node->decl)
2938 return true;
2939
2940 if (!node->thunk.thunk_p && !node->former_thunk_p ())
2941 {
2942 while (node2 && node->decl != node2->decl)
2943 node2 = node2->clone_of;
2944 return node2 != NULL;
2945 }
2946
2947 /* There are no virtual clones of thunks so check former_clone_of or if we
2948 might have skipped thunks because these adjustments are no longer
2949 necessary. */
2950 while (node->thunk.thunk_p || node->former_thunk_p ())
2951 {
2952 if (!node->thunk.this_adjusting)
2953 return false;
2954 /* In case of instrumented expanded thunks, which can have multiple calls
2955 in them, we do not know how to continue and just have to be
2956 optimistic. */
2957 if (node->callees->next_callee)
2958 return true;
2959 node = node->callees->callee->ultimate_alias_target ();
2960
2961 if (!node2->clone.param_adjustments
2962 || node2->clone.param_adjustments->first_param_intact_p ())
2963 return false;
2964 if (node2->former_clone_of == node->decl)
2965 return true;
2966
2967 cgraph_node *n2 = node2;
2968 while (n2 && node->decl != n2->decl)
2969 n2 = n2->clone_of;
2970 if (n2)
2971 return true;
2972 }
2973
2974 return false;
2975 }
2976
2977 /* Verify edge count and frequency. */
2978
2979 bool
2980 cgraph_edge::verify_count ()
2981 {
2982 bool error_found = false;
2983 if (!count.verify ())
2984 {
2985 error ("caller edge count invalid");
2986 error_found = true;
2987 }
2988 return error_found;
2989 }
2990
2991 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2992 static void
2993 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
2994 {
2995 bool fndecl_was_null = false;
2996 /* debug_gimple_stmt needs correct cfun. */
2997 if (cfun != this_cfun)
2998 set_cfun (this_cfun);
2999 /* ...and an actual current_function_decl. */
3000 if (!current_function_decl)
3001 {
3002 current_function_decl = this_cfun->decl;
3003 fndecl_was_null = true;
3004 }
3005 debug_gimple_stmt (stmt);
3006 if (fndecl_was_null)
3007 current_function_decl = NULL;
3008 }
3009
3010 /* Verify that call graph edge corresponds to DECL from the associated
3011 statement. Return true if the verification should fail. */
3012
3013 bool
3014 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3015 {
3016 cgraph_node *node;
3017
3018 if (!decl || callee->inlined_to)
3019 return false;
3020 if (symtab->state == LTO_STREAMING)
3021 return false;
3022 node = cgraph_node::get (decl);
3023
3024 /* We do not know if a node from a different partition is an alias or what it
3025 aliases and therefore cannot do the former_clone_of check reliably. When
3026 body_removed is set, we have lost all information about what was alias or
3027 thunk of and also cannot proceed. */
3028 if (!node
3029 || node->body_removed
3030 || node->in_other_partition
3031 || callee->icf_merged
3032 || callee->in_other_partition)
3033 return false;
3034
3035 node = node->ultimate_alias_target ();
3036
3037 /* Optimizers can redirect unreachable calls or calls triggering undefined
3038 behavior to builtin_unreachable. */
3039
3040 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3041 return false;
3042
3043 if (callee->former_clone_of != node->decl
3044 && (node != callee->ultimate_alias_target ())
3045 && !clone_of_p (node, callee))
3046 return true;
3047 else
3048 return false;
3049 }
3050
3051 /* Disable warnings about missing quoting in GCC diagnostics for
3052 the verification errors. Their format strings don't follow GCC
3053 diagnostic conventions and the calls are ultimately followed by
3054 one to internal_error. */
3055 #if __GNUC__ >= 10
3056 # pragma GCC diagnostic push
3057 # pragma GCC diagnostic ignored "-Wformat-diag"
3058 #endif
3059
3060 /* Verify consistency of a given cgraph node. */
3061 DEBUG_FUNCTION void
3062 cgraph_node::verify_node (void)
3063 {
3064 cgraph_edge *e;
3065 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3066 basic_block this_block;
3067 gimple_stmt_iterator gsi;
3068 bool error_found = false;
3069
3070 if (seen_error ())
3071 return;
3072
3073 timevar_push (TV_CGRAPH_VERIFY);
3074 error_found |= verify_base ();
3075 for (e = callees; e; e = e->next_callee)
3076 if (e->aux)
3077 {
3078 error ("aux field set for edge %s->%s",
3079 identifier_to_locale (e->caller->name ()),
3080 identifier_to_locale (e->callee->name ()));
3081 error_found = true;
3082 }
3083 if (!count.verify ())
3084 {
3085 error ("cgraph count invalid");
3086 error_found = true;
3087 }
3088 if (inlined_to && same_comdat_group)
3089 {
3090 error ("inline clone in same comdat group list");
3091 error_found = true;
3092 }
3093 if (!definition && !in_other_partition && local)
3094 {
3095 error ("local symbols must be defined");
3096 error_found = true;
3097 }
3098 if (inlined_to && externally_visible)
3099 {
3100 error ("externally visible inline clone");
3101 error_found = true;
3102 }
3103 if (inlined_to && address_taken)
3104 {
3105 error ("inline clone with address taken");
3106 error_found = true;
3107 }
3108 if (inlined_to && force_output)
3109 {
3110 error ("inline clone is forced to output");
3111 error_found = true;
3112 }
3113 for (e = indirect_calls; e; e = e->next_callee)
3114 {
3115 if (e->aux)
3116 {
3117 error ("aux field set for indirect edge from %s",
3118 identifier_to_locale (e->caller->name ()));
3119 error_found = true;
3120 }
3121 if (!e->indirect_unknown_callee
3122 || !e->indirect_info)
3123 {
3124 error ("An indirect edge from %s is not marked as indirect or has "
3125 "associated indirect_info, the corresponding statement is: ",
3126 identifier_to_locale (e->caller->name ()));
3127 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3128 error_found = true;
3129 }
3130 }
3131 bool check_comdat = comdat_local_p ();
3132 for (e = callers; e; e = e->next_caller)
3133 {
3134 if (e->verify_count ())
3135 error_found = true;
3136 if (check_comdat
3137 && !in_same_comdat_group_p (e->caller))
3138 {
3139 error ("comdat-local function called by %s outside its comdat",
3140 identifier_to_locale (e->caller->name ()));
3141 error_found = true;
3142 }
3143 if (!e->inline_failed)
3144 {
3145 if (inlined_to
3146 != (e->caller->inlined_to
3147 ? e->caller->inlined_to : e->caller))
3148 {
3149 error ("inlined_to pointer is wrong");
3150 error_found = true;
3151 }
3152 if (callers->next_caller)
3153 {
3154 error ("multiple inline callers");
3155 error_found = true;
3156 }
3157 }
3158 else
3159 if (inlined_to)
3160 {
3161 error ("inlined_to pointer set for noninline callers");
3162 error_found = true;
3163 }
3164 }
3165 for (e = callees; e; e = e->next_callee)
3166 {
3167 if (e->verify_count ())
3168 error_found = true;
3169 if (gimple_has_body_p (e->caller->decl)
3170 && !e->caller->inlined_to
3171 && !e->speculative
3172 /* Optimized out calls are redirected to __builtin_unreachable. */
3173 && (e->count.nonzero_p ()
3174 || ! e->callee->decl
3175 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3176 && count
3177 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3178 && (!e->count.ipa_p ()
3179 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3180 {
3181 error ("caller edge count does not match BB count");
3182 fprintf (stderr, "edge count: ");
3183 e->count.dump (stderr);
3184 fprintf (stderr, "\n bb count: ");
3185 gimple_bb (e->call_stmt)->count.dump (stderr);
3186 fprintf (stderr, "\n");
3187 error_found = true;
3188 }
3189 }
3190 for (e = indirect_calls; e; e = e->next_callee)
3191 {
3192 if (e->verify_count ())
3193 error_found = true;
3194 if (gimple_has_body_p (e->caller->decl)
3195 && !e->caller->inlined_to
3196 && !e->speculative
3197 && e->count.ipa_p ()
3198 && count
3199 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3200 && (!e->count.ipa_p ()
3201 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3202 {
3203 error ("indirect call count does not match BB count");
3204 fprintf (stderr, "edge count: ");
3205 e->count.dump (stderr);
3206 fprintf (stderr, "\n bb count: ");
3207 gimple_bb (e->call_stmt)->count.dump (stderr);
3208 fprintf (stderr, "\n");
3209 error_found = true;
3210 }
3211 }
3212 if (!callers && inlined_to)
3213 {
3214 error ("inlined_to pointer is set but no predecessors found");
3215 error_found = true;
3216 }
3217 if (inlined_to == this)
3218 {
3219 error ("inlined_to pointer refers to itself");
3220 error_found = true;
3221 }
3222
3223 if (clone_of)
3224 {
3225 cgraph_node *first_clone = clone_of->clones;
3226 if (first_clone != this)
3227 {
3228 if (prev_sibling_clone->clone_of != clone_of)
3229 {
3230 error ("cgraph_node has wrong clone_of");
3231 error_found = true;
3232 }
3233 }
3234 }
3235 if (clones)
3236 {
3237 cgraph_node *n;
3238 for (n = clones; n; n = n->next_sibling_clone)
3239 if (n->clone_of != this)
3240 break;
3241 if (n)
3242 {
3243 error ("cgraph_node has wrong clone list");
3244 error_found = true;
3245 }
3246 }
3247 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3248 {
3249 error ("cgraph_node is in clone list but it is not clone");
3250 error_found = true;
3251 }
3252 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3253 {
3254 error ("cgraph_node has wrong prev_clone pointer");
3255 error_found = true;
3256 }
3257 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3258 {
3259 error ("double linked list of clones corrupted");
3260 error_found = true;
3261 }
3262
3263 if (analyzed && alias)
3264 {
3265 bool ref_found = false;
3266 int i;
3267 ipa_ref *ref = NULL;
3268
3269 if (callees)
3270 {
3271 error ("Alias has call edges");
3272 error_found = true;
3273 }
3274 for (i = 0; iterate_reference (i, ref); i++)
3275 if (ref->use != IPA_REF_ALIAS)
3276 {
3277 error ("Alias has non-alias reference");
3278 error_found = true;
3279 }
3280 else if (ref_found)
3281 {
3282 error ("Alias has more than one alias reference");
3283 error_found = true;
3284 }
3285 else
3286 ref_found = true;
3287 if (!ref_found)
3288 {
3289 error ("Analyzed alias has no reference");
3290 error_found = true;
3291 }
3292 }
3293
3294 if (analyzed && thunk.thunk_p)
3295 {
3296 if (!callees)
3297 {
3298 error ("No edge out of thunk node");
3299 error_found = true;
3300 }
3301 else if (callees->next_callee)
3302 {
3303 error ("More than one edge out of thunk node");
3304 error_found = true;
3305 }
3306 if (gimple_has_body_p (decl) && !inlined_to)
3307 {
3308 error ("Thunk is not supposed to have body");
3309 error_found = true;
3310 }
3311 }
3312 else if (analyzed && gimple_has_body_p (decl)
3313 && !TREE_ASM_WRITTEN (decl)
3314 && (!DECL_EXTERNAL (decl) || inlined_to)
3315 && !flag_wpa)
3316 {
3317 if (this_cfun->cfg)
3318 {
3319 hash_set<gimple *> stmts;
3320 int i;
3321 ipa_ref *ref = NULL;
3322
3323 /* Reach the trees by walking over the CFG, and note the
3324 enclosing basic-blocks in the call edges. */
3325 FOR_EACH_BB_FN (this_block, this_cfun)
3326 {
3327 for (gsi = gsi_start_phis (this_block);
3328 !gsi_end_p (gsi); gsi_next (&gsi))
3329 stmts.add (gsi_stmt (gsi));
3330 for (gsi = gsi_start_bb (this_block);
3331 !gsi_end_p (gsi);
3332 gsi_next (&gsi))
3333 {
3334 gimple *stmt = gsi_stmt (gsi);
3335 stmts.add (stmt);
3336 if (is_gimple_call (stmt))
3337 {
3338 cgraph_edge *e = get_edge (stmt);
3339 tree decl = gimple_call_fndecl (stmt);
3340 if (e)
3341 {
3342 if (e->aux)
3343 {
3344 error ("shared call_stmt:");
3345 cgraph_debug_gimple_stmt (this_cfun, stmt);
3346 error_found = true;
3347 }
3348 if (!e->indirect_unknown_callee)
3349 {
3350 if (e->verify_corresponds_to_fndecl (decl))
3351 {
3352 error ("edge points to wrong declaration:");
3353 debug_tree (e->callee->decl);
3354 fprintf (stderr," Instead of:");
3355 debug_tree (decl);
3356 error_found = true;
3357 }
3358 }
3359 else if (decl)
3360 {
3361 error ("an indirect edge with unknown callee "
3362 "corresponding to a call_stmt with "
3363 "a known declaration:");
3364 error_found = true;
3365 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3366 }
3367 e->aux = (void *)1;
3368 }
3369 else if (decl)
3370 {
3371 error ("missing callgraph edge for call stmt:");
3372 cgraph_debug_gimple_stmt (this_cfun, stmt);
3373 error_found = true;
3374 }
3375 }
3376 }
3377 }
3378 for (i = 0; iterate_reference (i, ref); i++)
3379 if (ref->stmt && !stmts.contains (ref->stmt))
3380 {
3381 error ("reference to dead statement");
3382 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3383 error_found = true;
3384 }
3385 }
3386 else
3387 /* No CFG available?! */
3388 gcc_unreachable ();
3389
3390 for (e = callees; e; e = e->next_callee)
3391 {
3392 if (!e->aux)
3393 {
3394 error ("edge %s->%s has no corresponding call_stmt",
3395 identifier_to_locale (e->caller->name ()),
3396 identifier_to_locale (e->callee->name ()));
3397 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3398 error_found = true;
3399 }
3400 e->aux = 0;
3401 }
3402 for (e = indirect_calls; e; e = e->next_callee)
3403 {
3404 if (!e->aux && !e->speculative)
3405 {
3406 error ("an indirect edge from %s has no corresponding call_stmt",
3407 identifier_to_locale (e->caller->name ()));
3408 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3409 error_found = true;
3410 }
3411 e->aux = 0;
3412 }
3413 }
3414
3415 if (nested != NULL)
3416 {
3417 for (cgraph_node *n = nested; n != NULL; n = n->next_nested)
3418 {
3419 if (n->origin == NULL)
3420 {
3421 error ("missing origin for a node in a nested list");
3422 error_found = true;
3423 }
3424 else if (n->origin != this)
3425 {
3426 error ("origin points to a different parent");
3427 error_found = true;
3428 break;
3429 }
3430 }
3431 }
3432 if (next_nested != NULL && origin == NULL)
3433 {
3434 error ("missing origin for a node in a nested list");
3435 error_found = true;
3436 }
3437
3438 if (error_found)
3439 {
3440 dump (stderr);
3441 internal_error ("verify_cgraph_node failed");
3442 }
3443 timevar_pop (TV_CGRAPH_VERIFY);
3444 }
3445
3446 /* Verify whole cgraph structure. */
3447 DEBUG_FUNCTION void
3448 cgraph_node::verify_cgraph_nodes (void)
3449 {
3450 cgraph_node *node;
3451
3452 if (seen_error ())
3453 return;
3454
3455 FOR_EACH_FUNCTION (node)
3456 node->verify ();
3457 }
3458
3459 #if __GNUC__ >= 10
3460 # pragma GCC diagnostic pop
3461 #endif
3462
3463 /* Walk the alias chain to return the function cgraph_node is an alias of.
3464 Walk through thunks, too.
3465 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3466 When REF is non-NULL, assume that reference happens in symbol REF
3467 when determining the availability. */
3468
3469 cgraph_node *
3470 cgraph_node::function_symbol (enum availability *availability,
3471 struct symtab_node *ref)
3472 {
3473 cgraph_node *node = ultimate_alias_target (availability, ref);
3474
3475 while (node->thunk.thunk_p)
3476 {
3477 ref = node;
3478 node = node->callees->callee;
3479 if (availability)
3480 {
3481 enum availability a;
3482 a = node->get_availability (ref);
3483 if (a < *availability)
3484 *availability = a;
3485 }
3486 node = node->ultimate_alias_target (availability, ref);
3487 }
3488 return node;
3489 }
3490
3491 /* Walk the alias chain to return the function cgraph_node is an alias of.
3492 Walk through non virtual thunks, too. Thus we return either a function
3493 or a virtual thunk node.
3494 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3495 When REF is non-NULL, assume that reference happens in symbol REF
3496 when determining the availability. */
3497
3498 cgraph_node *
3499 cgraph_node::function_or_virtual_thunk_symbol
3500 (enum availability *availability,
3501 struct symtab_node *ref)
3502 {
3503 cgraph_node *node = ultimate_alias_target (availability, ref);
3504
3505 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3506 {
3507 ref = node;
3508 node = node->callees->callee;
3509 if (availability)
3510 {
3511 enum availability a;
3512 a = node->get_availability (ref);
3513 if (a < *availability)
3514 *availability = a;
3515 }
3516 node = node->ultimate_alias_target (availability, ref);
3517 }
3518 return node;
3519 }
3520
3521 /* When doing LTO, read cgraph_node's body from disk if it is not already
3522 present. */
3523
3524 bool
3525 cgraph_node::get_untransformed_body (void)
3526 {
3527 lto_file_decl_data *file_data;
3528 const char *data, *name;
3529 size_t len;
3530 tree decl = this->decl;
3531
3532 /* Check if body is already there. Either we have gimple body or
3533 the function is a thunk and in that case we set DECL_ARGUMENTS. */
3534 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3535 return false;
3536
3537 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3538
3539 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3540
3541 file_data = lto_file_data;
3542 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3543
3544 /* We may have renamed the declaration, e.g., a static function. */
3545 name = lto_get_decl_name_mapping (file_data, name);
3546 struct lto_in_decl_state *decl_state
3547 = lto_get_function_in_decl_state (file_data, decl);
3548
3549 data = lto_get_section_data (file_data, LTO_section_function_body,
3550 name, &len, decl_state->compressed);
3551 if (!data)
3552 fatal_error (input_location, "%s: section %s is missing",
3553 file_data->file_name,
3554 name);
3555
3556 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3557
3558 if (!quiet_flag)
3559 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3560 lto_input_function_body (file_data, this, data);
3561 lto_stats.num_function_bodies++;
3562 lto_free_section_data (file_data, LTO_section_function_body, name,
3563 data, len, decl_state->compressed);
3564 lto_free_function_in_decl_state_for_node (this);
3565 /* Keep lto file data so ipa-inline-analysis knows about cross module
3566 inlining. */
3567
3568 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3569
3570 return true;
3571 }
3572
3573 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3574 if it is not already present. When some IPA transformations are scheduled,
3575 apply them. */
3576
3577 bool
3578 cgraph_node::get_body (void)
3579 {
3580 bool updated;
3581
3582 updated = get_untransformed_body ();
3583
3584 /* Getting transformed body makes no sense for inline clones;
3585 we should never use this on real clones because they are materialized
3586 early.
3587 TODO: Materializing clones here will likely lead to smaller LTRANS
3588 footprint. */
3589 gcc_assert (!inlined_to && !clone_of);
3590 if (ipa_transforms_to_apply.exists ())
3591 {
3592 opt_pass *saved_current_pass = current_pass;
3593 FILE *saved_dump_file = dump_file;
3594 const char *saved_dump_file_name = dump_file_name;
3595 dump_flags_t saved_dump_flags = dump_flags;
3596 dump_file_name = NULL;
3597 set_dump_file (NULL);
3598
3599 push_cfun (DECL_STRUCT_FUNCTION (decl));
3600 execute_all_ipa_transforms (true);
3601 cgraph_edge::rebuild_edges ();
3602 free_dominance_info (CDI_DOMINATORS);
3603 free_dominance_info (CDI_POST_DOMINATORS);
3604 pop_cfun ();
3605 updated = true;
3606
3607 current_pass = saved_current_pass;
3608 set_dump_file (saved_dump_file);
3609 dump_file_name = saved_dump_file_name;
3610 dump_flags = saved_dump_flags;
3611 }
3612 return updated;
3613 }
3614
3615 /* Return the DECL_STRUCT_FUNCTION of the function. */
3616
3617 struct function *
3618 cgraph_node::get_fun () const
3619 {
3620 const cgraph_node *node = this;
3621 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3622
3623 while (!fun && node->clone_of)
3624 {
3625 node = node->clone_of;
3626 fun = DECL_STRUCT_FUNCTION (node->decl);
3627 }
3628
3629 return fun;
3630 }
3631
3632 /* Verify if the type of the argument matches that of the function
3633 declaration. If we cannot verify this or there is a mismatch,
3634 return false. */
3635
3636 static bool
3637 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3638 {
3639 tree parms, p;
3640 unsigned int i, nargs;
3641
3642 /* Calls to internal functions always match their signature. */
3643 if (gimple_call_internal_p (stmt))
3644 return true;
3645
3646 nargs = gimple_call_num_args (stmt);
3647
3648 /* Get argument types for verification. */
3649 if (fndecl)
3650 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3651 else
3652 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3653
3654 /* Verify if the type of the argument matches that of the function
3655 declaration. If we cannot verify this or there is a mismatch,
3656 return false. */
3657 if (fndecl && DECL_ARGUMENTS (fndecl))
3658 {
3659 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3660 i < nargs;
3661 i++, p = DECL_CHAIN (p))
3662 {
3663 tree arg;
3664 /* We cannot distinguish a varargs function from the case
3665 of excess parameters; still, deferring the inlining decision
3666 to the callee is possible. */
3667 if (!p)
3668 break;
3669 arg = gimple_call_arg (stmt, i);
3670 if (p == error_mark_node
3671 || DECL_ARG_TYPE (p) == error_mark_node
3672 || arg == error_mark_node
3673 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3674 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3675 return false;
3676 }
3677 if (args_count_match && p)
3678 return false;
3679 }
3680 else if (parms)
3681 {
3682 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3683 {
3684 tree arg;
3685 /* If this is a varargs function defer inlining decision
3686 to callee. */
3687 if (!p)
3688 break;
3689 arg = gimple_call_arg (stmt, i);
3690 if (TREE_VALUE (p) == error_mark_node
3691 || arg == error_mark_node
3692 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3693 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3694 && !fold_convertible_p (TREE_VALUE (p), arg)))
3695 return false;
3696 }
3697 }
3698 else
3699 {
3700 if (nargs != 0)
3701 return false;
3702 }
3703 return true;
3704 }
3705
3706 /* Verify if the type of the argument and lhs of CALL_STMT matches
3707 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3708 true, the arg count needs to be the same.
3709 If we cannot verify this or there is a mismatch, return false. */
3710
3711 bool
3712 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3713 bool args_count_match)
3714 {
3715 tree lhs;
3716
3717 if ((DECL_RESULT (callee)
3718 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3719 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3720 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3721 TREE_TYPE (lhs))
3722 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3723 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3724 return false;
3725 return true;
3726 }
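
/* For example (illustrative; callee is hypothetical), if a call statement
   was built against a mismatched prototype and passes an int where the
   definition declares a structure parameter,

     struct S { int i; };
     int callee (struct S s);
     ...
     callee (42);

   the argument type is neither compatible with nor foldably convertible
   to struct S, so the check above returns false and the call/declaration
   pair is rejected (such edges are typically marked
   call_stmt_cannot_inline_p when the cgraph is built).  */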
3727
3728 /* Reset all state within cgraph.c so that we can rerun the compiler
3729 within the same process. For use by toplev::finalize. */
3730
3731 void
3732 cgraph_c_finalize (void)
3733 {
3734 symtab = NULL;
3735
3736 x_cgraph_nodes_queue = NULL;
3737
3738 cgraph_fnver_htab = NULL;
3739 version_info_node = NULL;
3740 }
3741
3742 /* A worker for call_for_symbol_and_aliases. */
3743
3744 bool
3745 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3746 void *),
3747 void *data,
3748 bool include_overwritable)
3749 {
3750 ipa_ref *ref;
3751 FOR_EACH_ALIAS (this, ref)
3752 {
3753 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3754 if (include_overwritable
3755 || alias->get_availability () > AVAIL_INTERPOSABLE)
3756 if (alias->call_for_symbol_and_aliases (callback, data,
3757 include_overwritable))
3758 return true;
3759 }
3760 return false;
3761 }
3762
3763 /* Return true if NODE has a thunk. */
3764
3765 bool
3766 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3767 {
3768 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3769 if (e->caller->thunk.thunk_p)
3770 return true;
3771 return false;
3772 }
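
/* has_thunk_p has the callback signature expected by
   call_for_symbol_and_aliases, so a hedged usage sketch (hypothetical
   call site, for illustration only) is:

     if (node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
                                            NULL, true))
       ... NODE or one of its aliases is called through a thunk ...  */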
3773
3774 /* Return the expected frequency of executions of this call within its function. */
3775
3776 sreal
3777 cgraph_edge::sreal_frequency ()
3778 {
3779 return count.to_sreal_scale (caller->inlined_to
3780 ? caller->inlined_to->count
3781 : caller->count);
3782 }
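
/* For example (illustrative numbers only, not taken from a real profile):
   if this edge has an execution count of 50 and the entry count of the
   function the call ultimately resides in is 200, the expected frequency
   is 50/200 = 0.25 executions of the call per invocation of that
   function.  */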
3783
3784
3785 /* During LTO stream-in this can be used to check whether the call can
3786 possibly be internal to the current translation unit. */
3787
3788 bool
3789 cgraph_edge::possibly_call_in_translation_unit_p (void)
3790 {
3791 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
3792
3793 /* With incremental linking we may end up getting the function body later. */
3794 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
3795 return true;
3796
3797 /* We could be smarter here and avoid streaming in indirect calls we can't
3798 track, but that would require arranging for the indirect call
3799 summary to be streamed first. */
3800 if (!callee)
3801 return true;
3802
3803 /* If the callee is local to the original translation unit, it will be defined. */
3804 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
3805 return true;
3806
3807 /* Otherwise we need to look up the prevailing symbol (the symbol table is
3808 not merged yet) and see if it is a definition. In fact we may also
3809 resolve aliases, but that is probably not too important. */
3810 symtab_node *node = callee;
3811 for (int n = 10; node->previous_sharing_asm_name && n; n--)
3812 node = node->previous_sharing_asm_name;
3813 if (node->previous_sharing_asm_name)
3814 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
3815 gcc_assert (TREE_PUBLIC (node->decl));
3816 return node->get_availability () >= AVAIL_AVAILABLE;
3817 }
3818
3819 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
3820 This needs to be a global so that it can be a GC root, and thus
3821 prevent the stashed copy from being garbage-collected if the GC runs
3822 during a symbol_table_test. */
3823
3824 symbol_table *saved_symtab;
3825
3826 #if CHECKING_P
3827
3828 namespace selftest {
3829
3830 /* class selftest::symbol_table_test. */
3831
3832 /* Constructor. Store the old value of symtab, and create a new one. */
3833
3834 symbol_table_test::symbol_table_test ()
3835 {
3836 gcc_assert (saved_symtab == NULL);
3837 saved_symtab = symtab;
3838 symtab = new (ggc_alloc <symbol_table> ()) symbol_table ();
3839 }
3840
3841 /* Destructor. Restore the old value of symtab. */
3842
3843 symbol_table_test::~symbol_table_test ()
3844 {
3845 gcc_assert (saved_symtab != NULL);
3846 symtab = saved_symtab;
3847 saved_symtab = NULL;
3848 }
3849
3850 /* Verify that symbol_table_test works. */
3851
3852 static void
3853 test_symbol_table_test ()
3854 {
3855 /* Simulate running two selftests involving symbol tables. */
3856 for (int i = 0; i < 2; i++)
3857 {
3858 symbol_table_test stt;
3859 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
3860 get_identifier ("test_decl"),
3861 build_function_type_list (void_type_node,
3862 NULL_TREE));
3863 cgraph_node *node = cgraph_node::get_create (test_decl);
3864 gcc_assert (node);
3865
3866 /* Verify that the node has order 0 on both iterations,
3867 and thus that nodes have predictable dump names in selftests. */
3868 ASSERT_EQ (node->order, 0);
3869 ASSERT_STREQ (node->dump_name (), "test_decl/0");
3870 }
3871 }
3872
3873 /* Run all of the selftests within this file. */
3874
3875 void
3876 cgraph_c_tests ()
3877 {
3878 test_symbol_table_test ();
3879 }
3880
3881 } // namespace selftest
3882
3883 #endif /* CHECKING_P */
3884
3885 #include "gt-cgraph.h"