gcc/cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
   21 /* This file contains basic routines for manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
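
/* For instance (a purely illustrative example, not taken from this file),
   given

     void g (void);
     void f (void) { g (); g (); }

   the call graph contains one node for f, one node for g, and two distinct
   edges from f to g, one per call site.  */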
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "params.h"
61 #include "tree-chkp.h"
62 #include "context.h"
63 #include "gimplify.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73
74 /* Symbol table global context. */
75 symbol_table *symtab;
76
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
80 void *data;
81 struct cgraph_edge_hook_list *next;
82 };
83
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
87 void *data;
88 struct cgraph_node_hook_list *next;
89 };
90
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
94 void *data;
95 struct cgraph_2edge_hook_list *next;
96 };
97
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
101 void *data;
102 struct cgraph_2node_hook_list *next;
103 };
104
105 /* Hash descriptor for cgraph_function_version_info. */
106
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
112 };
113
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
  116    corresponding cgraph_node.  */
117
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119
120 /* Hash function for cgraph_fnver_htab. */
121 hashval_t
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124 int uid = ptr->this_node->uid;
125 return (hashval_t)(uid);
126 }
127
128 /* eq function for cgraph_fnver_htab. */
129 bool
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
132 {
133 return n1->this_node->uid == n2->this_node->uid;
134 }
135
  136 /* Mark all allocated nodes as GC roots.  */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
139
140 /* Return true if NODE's address can be compared. */
141
142 bool
143 symtab_node::address_can_be_compared_p ()
144 {
  145   /* Addresses of virtual tables and virtual functions are never compared.  */
146 if (DECL_VIRTUAL_P (decl))
147 return false;
  148   /* Addresses of C++ cdtors are never compared.  */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
152 return false;
  153   /* Constant pool symbol addresses are never compared.
154 flag_merge_constants permits us to assume the same on readonly vars. */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159 return false;
160 return true;
161 }
162
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
166 {
167 cgraph_function_version_info key;
168 key.this_node = this;
169
170 if (cgraph_fnver_htab == NULL)
171 return NULL;
172
173 return cgraph_fnver_htab->find (&key);
174 }
175
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
180 {
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
184
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189 = version_info_node;
190 return version_info_node;
191 }
192
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
194 static void
195 delete_function_version (cgraph_function_version_info *decl_v)
196 {
197 if (decl_v == NULL)
198 return;
199
200 if (decl_v->prev != NULL)
201 decl_v->prev->next = decl_v->next;
202
203 if (decl_v->next != NULL)
204 decl_v->next->prev = decl_v->prev;
205
206 if (cgraph_fnver_htab != NULL)
207 cgraph_fnver_htab->remove_elt (decl_v);
208 }
209
210 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
211 DECL is a duplicate declaration. */
212 void
213 cgraph_node::delete_function_version_by_decl (tree decl)
214 {
215 cgraph_node *decl_node = cgraph_node::get (decl);
216
217 if (decl_node == NULL)
218 return;
219
220 delete_function_version (decl_node->function_version ());
221
222 decl_node->remove ();
223 }
224
225 /* Record that DECL1 and DECL2 are semantically identical function
226 versions. */
227 void
228 cgraph_node::record_function_versions (tree decl1, tree decl2)
229 {
230 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
231 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
232 cgraph_function_version_info *decl1_v = NULL;
233 cgraph_function_version_info *decl2_v = NULL;
234 cgraph_function_version_info *before;
235 cgraph_function_version_info *after;
236
237 gcc_assert (decl1_node != NULL && decl2_node != NULL);
238 decl1_v = decl1_node->function_version ();
239 decl2_v = decl2_node->function_version ();
240
241 if (decl1_v != NULL && decl2_v != NULL)
242 return;
243
244 if (decl1_v == NULL)
245 decl1_v = decl1_node->insert_new_function_version ();
246
247 if (decl2_v == NULL)
248 decl2_v = decl2_node->insert_new_function_version ();
249
250 /* Chain decl2_v and decl1_v. All semantically identical versions
251 will be chained together. */
252
253 before = decl1_v;
254 after = decl2_v;
255
256 while (before->next != NULL)
257 before = before->next;
258
259 while (after->prev != NULL)
  260     after = after->prev;
261
262 before->next = after;
263 after->prev = before;
264 }
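
/* Illustrative sketch (assumed typical use, not code from this file): a
   front end that has two semantically identical versions of a function,
   with hypothetical declarations FN_DECL_A and FN_DECL_B, records them with

     cgraph_node::record_function_versions (fn_decl_a, fn_decl_b);

   after which their cgraph_function_version_info records sit on one doubly
   linked chain, as implemented above.  */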
265
  266 /* Initialize the callgraph dump files.  */
267
268 void
269 symbol_table::initialize (void)
270 {
271 if (!dump_file)
272 dump_file = dump_begin (TDI_cgraph, NULL);
273
274 if (!ipa_clones_dump_file)
275 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
276 }
277
278 /* Allocate new callgraph node and insert it into basic data structures. */
279
280 cgraph_node *
281 symbol_table::create_empty (void)
282 {
283 cgraph_node *node = allocate_cgraph_symbol ();
284
285 node->type = SYMTAB_FUNCTION;
286 node->frequency = NODE_FREQUENCY_NORMAL;
287 node->count_materialization_scale = REG_BR_PROB_BASE;
288 cgraph_count++;
289
290 return node;
291 }
292
293 /* Register HOOK to be called with DATA on each removed edge. */
294 cgraph_edge_hook_list *
295 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
296 {
297 cgraph_edge_hook_list *entry;
298 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
299
300 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
301 entry->hook = hook;
302 entry->data = data;
303 entry->next = NULL;
304 while (*ptr)
305 ptr = &(*ptr)->next;
306 *ptr = entry;
307 return entry;
308 }
309
310 /* Remove ENTRY from the list of hooks called on removing edges. */
311 void
312 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
313 {
314 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
315
316 while (*ptr != entry)
317 ptr = &(*ptr)->next;
318 *ptr = entry->next;
319 free (entry);
320 }
321
322 /* Call all edge removal hooks. */
323 void
324 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
325 {
326 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
327 while (entry)
328 {
329 entry->hook (e, entry->data);
330 entry = entry->next;
331 }
332 }
333
334 /* Register HOOK to be called with DATA on each removed node. */
335 cgraph_node_hook_list *
336 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
337 {
338 cgraph_node_hook_list *entry;
339 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
340
341 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
342 entry->hook = hook;
343 entry->data = data;
344 entry->next = NULL;
345 while (*ptr)
346 ptr = &(*ptr)->next;
347 *ptr = entry;
348 return entry;
349 }
350
351 /* Remove ENTRY from the list of hooks called on removing nodes. */
352 void
353 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
354 {
355 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
356
357 while (*ptr != entry)
358 ptr = &(*ptr)->next;
359 *ptr = entry->next;
360 free (entry);
361 }
362
363 /* Call all node removal hooks. */
364 void
365 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
366 {
367 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
368 while (entry)
369 {
370 entry->hook (node, entry->data);
371 entry = entry->next;
372 }
373 }
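
/* Minimal usage sketch for the hook lists above (the callback name is
   hypothetical; the cgraph_node_hook signature is assumed from cgraph.h):

     static void note_node_removal (cgraph_node *node, void *data);

     cgraph_node_hook_list *entry
       = symtab->add_cgraph_removal_hook (note_node_removal, NULL);
     ...
     symtab->remove_cgraph_removal_hook (entry);

   The edge and duplication hook lists below follow the same pattern.  */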
374
  375 /* Call all node insertion hooks.  */
376 void
377 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
378 {
379 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
380 while (entry)
381 {
382 entry->hook (node, entry->data);
383 entry = entry->next;
384 }
385 }
386
387
388 /* Register HOOK to be called with DATA on each inserted node. */
389 cgraph_node_hook_list *
390 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
391 {
392 cgraph_node_hook_list *entry;
393 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
394
395 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
396 entry->hook = hook;
397 entry->data = data;
398 entry->next = NULL;
399 while (*ptr)
400 ptr = &(*ptr)->next;
401 *ptr = entry;
402 return entry;
403 }
404
405 /* Remove ENTRY from the list of hooks called on inserted nodes. */
406 void
407 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
408 {
409 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
410
411 while (*ptr != entry)
412 ptr = &(*ptr)->next;
413 *ptr = entry->next;
414 free (entry);
415 }
416
417 /* Register HOOK to be called with DATA on each duplicated edge. */
418 cgraph_2edge_hook_list *
419 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
420 {
421 cgraph_2edge_hook_list *entry;
422 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
423
424 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
425 entry->hook = hook;
426 entry->data = data;
427 entry->next = NULL;
428 while (*ptr)
429 ptr = &(*ptr)->next;
430 *ptr = entry;
431 return entry;
432 }
433
434 /* Remove ENTRY from the list of hooks called on duplicating edges. */
435 void
436 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
437 {
438 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
439
440 while (*ptr != entry)
441 ptr = &(*ptr)->next;
442 *ptr = entry->next;
443 free (entry);
444 }
445
446 /* Call all edge duplication hooks. */
447 void
448 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
449 {
450 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
451 while (entry)
452 {
453 entry->hook (cs1, cs2, entry->data);
454 entry = entry->next;
455 }
456 }
457
458 /* Register HOOK to be called with DATA on each duplicated node. */
459 cgraph_2node_hook_list *
460 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
461 {
462 cgraph_2node_hook_list *entry;
463 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
464
465 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
466 entry->hook = hook;
467 entry->data = data;
468 entry->next = NULL;
469 while (*ptr)
470 ptr = &(*ptr)->next;
471 *ptr = entry;
472 return entry;
473 }
474
475 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
476 void
477 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
478 {
479 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
480
481 while (*ptr != entry)
482 ptr = &(*ptr)->next;
483 *ptr = entry->next;
484 free (entry);
485 }
486
487 /* Call all node duplication hooks. */
488 void
489 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
490 cgraph_node *node2)
491 {
492 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
493 while (entry)
494 {
495 entry->hook (node, node2, entry->data);
496 entry = entry->next;
497 }
498 }
499
500 /* Return cgraph node assigned to DECL. Create new one when needed. */
501
502 cgraph_node *
503 cgraph_node::create (tree decl)
504 {
505 cgraph_node *node = symtab->create_empty ();
506 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
507
508 node->decl = decl;
509
510 node->count = profile_count::uninitialized ();
511
512 if ((flag_openacc || flag_openmp)
513 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
514 {
515 node->offloadable = 1;
516 if (ENABLE_OFFLOADING)
517 g->have_offload = true;
518 }
519
520 node->register_symbol ();
521
522 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
523 {
524 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
525 node->next_nested = node->origin->nested;
526 node->origin->nested = node;
527 }
528 return node;
529 }
530
531 /* Try to find a call graph node for declaration DECL and if it does not exist
532 or if it corresponds to an inline clone, create a new one. */
533
534 cgraph_node *
535 cgraph_node::get_create (tree decl)
536 {
537 cgraph_node *first_clone = cgraph_node::get (decl);
538
539 if (first_clone && !first_clone->global.inlined_to)
540 return first_clone;
541
542 cgraph_node *node = cgraph_node::create (decl);
543 if (first_clone)
544 {
545 first_clone->clone_of = node;
546 node->clones = first_clone;
547 symtab->symtab_prevail_in_asm_name_hash (node);
548 node->decl->decl_with_vis.symtab_node = node;
549 if (dump_file)
550 fprintf (dump_file, "Introduced new external node "
551 "(%s) and turned into root of the clone tree.\n",
552 node->dump_name ());
553 }
554 else if (dump_file)
555 fprintf (dump_file, "Introduced new external node "
556 "(%s).\n", node->dump_name ());
557 return node;
558 }
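
/* A minimal sketch of the intended use (assumed, not from this file): code
   that only has a FUNCTION_DECL in hand, here a hypothetical FNDECL, obtains
   the corresponding node with

     cgraph_node *node = cgraph_node::get_create (fndecl);

   which returns the existing node unless it is an inline clone, in which
   case a fresh root node is created as above.  */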
559
  560 /* Mark ALIAS as an alias to TARGET.  TARGET may be a FUNCTION_DECL or an
  561    assembler name (IDENTIFIER_NODE).  Return the cgraph node for the alias.  */
562
563 cgraph_node *
564 cgraph_node::create_alias (tree alias, tree target)
565 {
566 cgraph_node *alias_node;
567
568 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
569 || TREE_CODE (target) == IDENTIFIER_NODE);
570 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
571 alias_node = cgraph_node::get_create (alias);
572 gcc_assert (!alias_node->definition);
573 alias_node->alias_target = target;
574 alias_node->definition = true;
575 alias_node->alias = true;
576 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
577 alias_node->transparent_alias = alias_node->weakref = true;
578 return alias_node;
579 }
580
581 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
582 and NULL otherwise.
583 Same body aliases are output whenever the body of DECL is output,
584 and cgraph_node::get (ALIAS) transparently returns
585 cgraph_node::get (DECL). */
586
587 cgraph_node *
588 cgraph_node::create_same_body_alias (tree alias, tree decl)
589 {
590 cgraph_node *n;
591
592 /* If aliases aren't supported by the assembler, fail. */
593 if (!TARGET_SUPPORTS_ALIASES)
594 return NULL;
595
596 /* Langhooks can create same body aliases of symbols not defined.
597 Those are useless. Drop them on the floor. */
598 if (symtab->global_info_ready)
599 return NULL;
600
601 n = cgraph_node::create_alias (alias, decl);
602 n->cpp_implicit_alias = true;
603 if (symtab->cpp_implicit_aliases_done)
604 n->resolve_alias (cgraph_node::get (decl));
605 return n;
606 }
607
  608 /* Add a thunk alias into the callgraph.  The alias declaration is ALIAS and it
  609    aliases DECL with adjustments made to the first parameter.
  610    See comments in struct cgraph_thunk_info for details on the parameters.  */
611
612 cgraph_node *
613 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
614 HOST_WIDE_INT fixed_offset,
615 HOST_WIDE_INT virtual_value,
616 tree virtual_offset,
617 tree real_alias)
618 {
619 cgraph_node *node;
620
621 node = cgraph_node::get (alias);
622 if (node)
623 node->reset ();
624 else
625 node = cgraph_node::create (alias);
626
  627   /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE.  */
628 gcc_checking_assert (virtual_offset
629 ? virtual_value == wi::to_wide (virtual_offset)
630 : virtual_value == 0);
631
632 node->thunk.fixed_offset = fixed_offset;
633 node->thunk.virtual_value = virtual_value;
634 node->thunk.alias = real_alias;
635 node->thunk.this_adjusting = this_adjusting;
636 node->thunk.virtual_offset_p = virtual_offset != NULL;
637 node->thunk.thunk_p = true;
638 node->definition = true;
639
640 return node;
641 }
642
643 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
644 Return NULL if there's no such node. */
645
646 cgraph_node *
647 cgraph_node::get_for_asmname (tree asmname)
648 {
649 /* We do not want to look at inline clones. */
650 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
651 node;
652 node = node->next_sharing_asm_name)
653 {
654 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
655 if (cn && !cn->global.inlined_to)
656 return cn;
657 }
658 return NULL;
659 }
660
661 /* Returns a hash value for X (which really is a cgraph_edge). */
662
663 hashval_t
664 cgraph_edge_hasher::hash (cgraph_edge *e)
665 {
666 /* This is a really poor hash function, but it is what htab_hash_pointer
667 uses. */
668 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
669 }
670
671 /* Returns a hash value for X (which really is a cgraph_edge). */
672
673 hashval_t
674 cgraph_edge_hasher::hash (gimple *call_stmt)
675 {
676 /* This is a really poor hash function, but it is what htab_hash_pointer
677 uses. */
678 return (hashval_t) ((intptr_t)call_stmt >> 3);
679 }
680
681 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
682
683 inline bool
684 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
685 {
686 return x->call_stmt == y;
687 }
688
689 /* Add call graph edge E to call site hash of its caller. */
690
691 static inline void
692 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
693 {
694 gimple *call = e->call_stmt;
695 *e->caller->call_site_hash->find_slot_with_hash
696 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
697 }
698
699 /* Add call graph edge E to call site hash of its caller. */
700
701 static inline void
702 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
703 {
704 /* There are two speculative edges for every statement (one direct,
705 one indirect); always hash the direct one. */
706 if (e->speculative && e->indirect_unknown_callee)
707 return;
708 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
709 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
710 if (*slot)
711 {
712 gcc_assert (((cgraph_edge *)*slot)->speculative);
713 if (e->callee)
714 *slot = e;
715 return;
716 }
717 gcc_assert (!*slot || e->speculative);
718 *slot = e;
719 }
720
721 /* Return the callgraph edge representing the GIMPLE_CALL statement
722 CALL_STMT. */
723
724 cgraph_edge *
725 cgraph_node::get_edge (gimple *call_stmt)
726 {
727 cgraph_edge *e, *e2;
728 int n = 0;
729
730 if (call_site_hash)
731 return call_site_hash->find_with_hash
732 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
733
  734   /* This loop may turn out to be a performance problem.  In that case, adding
  735      hash tables to call nodes with very many edges is probably the best
  736      solution.  It is not a good idea to add a pointer into the CALL_EXPR itself
  737      because we want to allow multiple cgraph nodes representing
  738      different clones of the same body before the body is actually cloned.  */
739 for (e = callees; e; e = e->next_callee)
740 {
741 if (e->call_stmt == call_stmt)
742 break;
743 n++;
744 }
745
746 if (!e)
747 for (e = indirect_calls; e; e = e->next_callee)
748 {
749 if (e->call_stmt == call_stmt)
750 break;
751 n++;
752 }
753
754 if (n > 100)
755 {
756 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
757 for (e2 = callees; e2; e2 = e2->next_callee)
758 cgraph_add_edge_to_call_site_hash (e2);
759 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
760 cgraph_add_edge_to_call_site_hash (e2);
761 }
762
763 return e;
764 }
765
766
  767 /* Change the call_stmt field of the edge to NEW_STMT.
  768    If UPDATE_SPECULATIVE is set and E is any component of a speculative
  769    edge, then update all components.  */
770
771 void
772 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
773 {
774 tree decl;
775
  776   /* A speculative edge has three components; update all of them
  777      when asked to.  */
778 if (update_speculative && speculative)
779 {
780 cgraph_edge *direct, *indirect;
781 ipa_ref *ref;
782
783 speculative_call_info (direct, indirect, ref);
784 direct->set_call_stmt (new_stmt, false);
785 indirect->set_call_stmt (new_stmt, false);
786 ref->stmt = new_stmt;
787 return;
788 }
789
790 /* Only direct speculative edges go to call_site_hash. */
791 if (caller->call_site_hash
792 && (!speculative || !indirect_unknown_callee))
793 {
794 caller->call_site_hash->remove_elt_with_hash
795 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
796 }
797
798 cgraph_edge *e = this;
799
800 call_stmt = new_stmt;
801 if (indirect_unknown_callee
802 && (decl = gimple_call_fndecl (new_stmt)))
803 {
804 /* Constant propagation (and possibly also inlining?) can turn an
805 indirect call into a direct one. */
806 cgraph_node *new_callee = cgraph_node::get (decl);
807
808 gcc_checking_assert (new_callee);
809 e = make_direct (new_callee);
810 }
811
812 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
813 e->can_throw_external = stmt_can_throw_external (new_stmt);
814 pop_cfun ();
815 if (e->caller->call_site_hash)
816 cgraph_add_edge_to_call_site_hash (e);
817 }
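
/* Sketch of typical use (assumed; the same pattern appears in
   cgraph_update_edges_for_call_stmt_node below): when a pass replaces a call
   statement, the corresponding edge is kept in sync with

     node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));

   where NODE, OLD_STMT and NEW_STMT are hypothetical variables.  */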
818
819 /* Allocate a cgraph_edge structure and fill it with data according to the
820 parameters of which only CALLEE can be NULL (when creating an indirect call
821 edge). */
822
823 cgraph_edge *
824 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
825 gcall *call_stmt, profile_count count, int freq,
826 bool indir_unknown_callee)
827 {
828 cgraph_edge *edge;
829
830 /* LTO does not actually have access to the call_stmt since these
831 have not been loaded yet. */
832 if (call_stmt)
833 {
834 /* This is a rather expensive check possibly triggering
835 construction of call stmt hashtable. */
836 cgraph_edge *e;
837 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
838 || e->speculative);
839
840 gcc_assert (is_gimple_call (call_stmt));
841 }
842
843 if (free_edges)
844 {
845 edge = free_edges;
846 free_edges = NEXT_FREE_EDGE (edge);
847 }
848 else
849 {
850 edge = ggc_alloc<cgraph_edge> ();
851 edge->uid = edges_max_uid++;
852 }
853
854 edges_count++;
855
856 edge->aux = NULL;
857 edge->caller = caller;
858 edge->callee = callee;
859 edge->prev_caller = NULL;
860 edge->next_caller = NULL;
861 edge->prev_callee = NULL;
862 edge->next_callee = NULL;
863 edge->lto_stmt_uid = 0;
864
865 edge->count = count;
866 edge->frequency = freq;
867 gcc_checking_assert (freq >= 0);
868 gcc_checking_assert (freq <= CGRAPH_FREQ_MAX);
869
870 edge->call_stmt = call_stmt;
871 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
872 edge->can_throw_external
873 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
874 pop_cfun ();
875 if (call_stmt
876 && callee && callee->decl
877 && !gimple_check_call_matching_types (call_stmt, callee->decl,
878 false))
879 {
880 edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
881 edge->call_stmt_cannot_inline_p = true;
882 }
883 else
884 {
885 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
886 edge->call_stmt_cannot_inline_p = false;
887 }
888
889 edge->indirect_info = NULL;
890 edge->indirect_inlining_edge = 0;
891 edge->speculative = false;
892 edge->indirect_unknown_callee = indir_unknown_callee;
893 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
894 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
895 edge->in_polymorphic_cdtor
896 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
897 caller->decl);
898 else
899 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
900 if (call_stmt && caller->call_site_hash)
901 cgraph_add_edge_to_call_site_hash (edge);
902
903 return edge;
904 }
905
906 /* Create edge from a given function to CALLEE in the cgraph. */
907
908 cgraph_edge *
909 cgraph_node::create_edge (cgraph_node *callee,
910 gcall *call_stmt, profile_count count, int freq)
911 {
912 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
913 freq, false);
914
915 initialize_inline_failed (edge);
916
917 edge->next_caller = callee->callers;
918 if (callee->callers)
919 callee->callers->prev_caller = edge;
920 edge->next_callee = callees;
921 if (callees)
922 callees->prev_callee = edge;
923 callees = edge;
924 callee->callers = edge;
925
926 return edge;
927 }
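
/* Usage sketch (assumed, mirroring calls made elsewhere in this file): while
   (re)building edges for a function body, a direct call statement STMT to a
   known CALLEE_DECL typically becomes

     node->create_edge (cgraph_node::get_create (callee_decl),
                        as_a <gcall *> (stmt), bb->count,
                        compute_call_stmt_bb_frequency (node->decl, bb));

   NODE, STMT, CALLEE_DECL and BB are hypothetical variables here.  */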
928
929 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
930
931 cgraph_indirect_call_info *
932 cgraph_allocate_init_indirect_info (void)
933 {
934 cgraph_indirect_call_info *ii;
935
936 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
937 ii->param_index = -1;
938 return ii;
939 }
940
941 /* Create an indirect edge with a yet-undetermined callee where the call
942 statement destination is a formal parameter of the caller with index
943 PARAM_INDEX. */
944
945 cgraph_edge *
946 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
947 profile_count count, int freq,
948 bool compute_indirect_info)
949 {
950 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
951 count, freq, true);
952 tree target;
953
954 initialize_inline_failed (edge);
955
956 edge->indirect_info = cgraph_allocate_init_indirect_info ();
957 edge->indirect_info->ecf_flags = ecf_flags;
958 edge->indirect_info->vptr_changed = true;
959
960 /* Record polymorphic call info. */
961 if (compute_indirect_info
962 && call_stmt
963 && (target = gimple_call_fn (call_stmt))
964 && virtual_method_call_p (target))
965 {
966 ipa_polymorphic_call_context context (decl, target, call_stmt);
967
968 /* Only record types can have virtual calls. */
969 edge->indirect_info->polymorphic = true;
970 edge->indirect_info->param_index = -1;
971 edge->indirect_info->otr_token
972 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
973 edge->indirect_info->otr_type = obj_type_ref_class (target);
974 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
975 edge->indirect_info->context = context;
976 }
977
978 edge->next_callee = indirect_calls;
979 if (indirect_calls)
980 indirect_calls->prev_callee = edge;
981 indirect_calls = edge;
982
983 return edge;
984 }
985
986 /* Remove the edge from the list of the callees of the caller. */
987
988 void
989 cgraph_edge::remove_caller (void)
990 {
991 if (prev_callee)
992 prev_callee->next_callee = next_callee;
993 if (next_callee)
994 next_callee->prev_callee = prev_callee;
995 if (!prev_callee)
996 {
997 if (indirect_unknown_callee)
998 caller->indirect_calls = next_callee;
999 else
1000 caller->callees = next_callee;
1001 }
1002 if (caller->call_site_hash)
1003 caller->call_site_hash->remove_elt_with_hash
1004 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1005 }
1006
1007 /* Put the edge onto the free list. */
1008
1009 void
1010 symbol_table::free_edge (cgraph_edge *e)
1011 {
1012 int uid = e->uid;
1013
1014 if (e->indirect_info)
1015 ggc_free (e->indirect_info);
1016
1017 /* Clear out the edge so we do not dangle pointers. */
1018 memset (e, 0, sizeof (*e));
1019 e->uid = uid;
1020 NEXT_FREE_EDGE (e) = free_edges;
1021 free_edges = e;
1022 edges_count--;
1023 }
1024
1025 /* Remove the edge in the cgraph. */
1026
1027 void
1028 cgraph_edge::remove (void)
1029 {
1030 /* Call all edge removal hooks. */
1031 symtab->call_edge_removal_hooks (this);
1032
1033 if (!indirect_unknown_callee)
1034 /* Remove from callers list of the callee. */
1035 remove_callee ();
1036
1037 /* Remove from callees list of the callers. */
1038 remove_caller ();
1039
1040 /* Put the edge onto the free list. */
1041 symtab->free_edge (this);
1042 }
1043
 1044 /* Turn the edge into a speculative call calling N2.  Update
 1045    the profile so the direct call is taken DIRECT_COUNT times
 1046    with DIRECT_FREQUENCY.
1047
1048 At clone materialization time, the indirect call E will
1049 be expanded as:
1050
1051 if (call_dest == N2)
1052 n2 ();
1053 else
1054 call call_dest
1055
1056 At this time the function just creates the direct call,
 1057    the reference representing the if conditional and attaches
 1058    them all to the original indirect call statement.
 1059
 1060    Return the direct edge created.  */
1061
1062 cgraph_edge *
1063 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
1064 int direct_frequency)
1065 {
1066 cgraph_node *n = caller;
1067 ipa_ref *ref = NULL;
1068 cgraph_edge *e2;
1069
1070 if (dump_file)
1071 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1072 n->dump_name (), n2->dump_name ());
1073 speculative = true;
1074 e2 = n->create_edge (n2, call_stmt, direct_count, direct_frequency);
1075 initialize_inline_failed (e2);
1076 e2->speculative = true;
1077 if (TREE_NOTHROW (n2->decl))
1078 e2->can_throw_external = false;
1079 else
1080 e2->can_throw_external = can_throw_external;
1081 e2->lto_stmt_uid = lto_stmt_uid;
1082 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1083 count -= e2->count;
1084 frequency -= e2->frequency;
1085 symtab->call_edge_duplication_hooks (this, e2);
1086 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1087 ref->lto_stmt_uid = lto_stmt_uid;
1088 ref->speculative = speculative;
1089 n2->mark_address_taken ();
1090 return e2;
1091 }
1092
 1093 /* A speculative call consists of three components:
 1094    1) an indirect edge representing the original call
 1095    2) a direct edge representing the new call
 1096    3) an ADDR_EXPR reference representing the speculative check.
 1097    All three components are attached to a single statement (the indirect
 1098    call) and if one of them exists, all of them must exist.
 1099
 1100    Given a speculative call edge, return all three components.
 1101  */
1102
1103 void
1104 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1105 cgraph_edge *&indirect,
1106 ipa_ref *&reference)
1107 {
1108 ipa_ref *ref;
1109 int i;
1110 cgraph_edge *e2;
1111 cgraph_edge *e = this;
1112
1113 if (!e->indirect_unknown_callee)
1114 for (e2 = e->caller->indirect_calls;
1115 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1116 e2 = e2->next_callee)
1117 ;
1118 else
1119 {
1120 e2 = e;
1121 /* We can take advantage of the call stmt hash. */
1122 if (e2->call_stmt)
1123 {
1124 e = e->caller->get_edge (e2->call_stmt);
1125 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1126 }
1127 else
1128 for (e = e->caller->callees;
1129 e2->call_stmt != e->call_stmt
1130 || e2->lto_stmt_uid != e->lto_stmt_uid;
1131 e = e->next_callee)
1132 ;
1133 }
1134 gcc_assert (e->speculative && e2->speculative);
1135 direct = e;
1136 indirect = e2;
1137
1138 reference = NULL;
1139 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1140 if (ref->speculative
1141 && ((ref->stmt && ref->stmt == e->call_stmt)
1142 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1143 {
1144 reference = ref;
1145 break;
1146 }
1147
 1148   /* A speculative edge always consists of all three components: the direct
 1149      edge, the indirect edge and the reference.  */
1150
1151 gcc_assert (e && e2 && ref);
1152 }
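
/* Sketch of how the components are retrieved (the same pattern is used in
   set_call_stmt above):

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     edge->speculative_call_info (direct, indirect, ref);

   where EDGE (hypothetical) may be either the direct or the indirect
   component of the speculative call.  */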
1153
 1154 /* The speculative call edge turned out to be a direct call to CALLEE_DECL.
 1155    Remove the speculative call sequence and return the edge representing the call.
 1156    It is up to the caller to redirect the call as appropriate.  */
1157
1158 cgraph_edge *
1159 cgraph_edge::resolve_speculation (tree callee_decl)
1160 {
1161 cgraph_edge *edge = this;
1162 cgraph_edge *e2;
1163 ipa_ref *ref;
1164
1165 gcc_assert (edge->speculative);
1166 edge->speculative_call_info (e2, edge, ref);
1167 if (!callee_decl
1168 || !ref->referred->semantically_equivalent_p
1169 (symtab_node::get (callee_decl)))
1170 {
1171 if (dump_file)
1172 {
1173 if (callee_decl)
1174 {
1175 fprintf (dump_file, "Speculative indirect call %s => %s has "
1176 "turned out to have contradicting known target ",
1177 edge->caller->dump_name (),
1178 e2->callee->dump_name ());
1179 print_generic_expr (dump_file, callee_decl);
1180 fprintf (dump_file, "\n");
1181 }
1182 else
1183 {
1184 fprintf (dump_file, "Removing speculative call %s => %s\n",
1185 edge->caller->dump_name (),
1186 e2->callee->dump_name ());
1187 }
1188 }
1189 }
1190 else
1191 {
1192 cgraph_edge *tmp = edge;
1193 if (dump_file)
1194 fprintf (dump_file, "Speculative call turned into direct call.\n");
1195 edge = e2;
1196 e2 = tmp;
1197 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1198 in the functions inlined through it. */
1199 }
1200 edge->count += e2->count;
1201 edge->frequency += e2->frequency;
1202 if (edge->frequency > CGRAPH_FREQ_MAX)
1203 edge->frequency = CGRAPH_FREQ_MAX;
1204 edge->speculative = false;
1205 e2->speculative = false;
1206 ref->remove_reference ();
1207 if (e2->indirect_unknown_callee || e2->inline_failed)
1208 e2->remove ();
1209 else
1210 e2->callee->remove_symbol_and_inline_clones ();
1211 if (edge->caller->call_site_hash)
1212 cgraph_update_edge_in_call_site_hash (edge);
1213 return edge;
1214 }
1215
 1216 /* Make an indirect edge with an unknown callee an ordinary edge leading to
 1217    CALLEE.  Return the resulting edge.  */
1219
1220 cgraph_edge *
1221 cgraph_edge::make_direct (cgraph_node *callee)
1222 {
1223 cgraph_edge *edge = this;
1224 gcc_assert (indirect_unknown_callee);
1225
1226 /* If we are redirecting speculative call, make it non-speculative. */
1227 if (indirect_unknown_callee && speculative)
1228 {
1229 edge = edge->resolve_speculation (callee->decl);
1230
 1231       /* On successful speculation just return the pre-existing direct edge.  */
1232 if (!indirect_unknown_callee)
1233 return edge;
1234 }
1235
1236 indirect_unknown_callee = 0;
1237 ggc_free (indirect_info);
1238 indirect_info = NULL;
1239
1240 /* Get the edge out of the indirect edge list. */
1241 if (prev_callee)
1242 prev_callee->next_callee = next_callee;
1243 if (next_callee)
1244 next_callee->prev_callee = prev_callee;
1245 if (!prev_callee)
1246 caller->indirect_calls = next_callee;
1247
 1248   /* Put it into the normal callee list.  */
1249 prev_callee = NULL;
1250 next_callee = caller->callees;
1251 if (caller->callees)
1252 caller->callees->prev_callee = edge;
1253 caller->callees = edge;
1254
1255 /* Insert to callers list of the new callee. */
1256 edge->set_callee (callee);
1257
1258 if (call_stmt
1259 && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1260 {
1261 call_stmt_cannot_inline_p = true;
1262 inline_failed = CIF_MISMATCHED_ARGUMENTS;
1263 }
1264
1265 /* We need to re-determine the inlining status of the edge. */
1266 initialize_inline_failed (edge);
1267 return edge;
1268 }
1269
1270 /* If necessary, change the function declaration in the call statement
1271 associated with E so that it corresponds to the edge callee. */
1272
1273 gimple *
1274 cgraph_edge::redirect_call_stmt_to_callee (void)
1275 {
1276 cgraph_edge *e = this;
1277
1278 tree decl = gimple_call_fndecl (e->call_stmt);
1279 gcall *new_stmt;
1280 gimple_stmt_iterator gsi;
1281 bool skip_bounds = false;
1282
1283 if (e->speculative)
1284 {
1285 cgraph_edge *e2;
1286 gcall *new_stmt;
1287 ipa_ref *ref;
1288
1289 e->speculative_call_info (e, e2, ref);
 1290       /* If there already is a direct call (i.e. as a result of the inliner's
 1291          substitution), forget about speculating.  */
1292 if (decl)
1293 e = e->resolve_speculation (decl);
1294 /* If types do not match, speculation was likely wrong.
1295 The direct edge was possibly redirected to the clone with a different
1296 signature. We did not update the call statement yet, so compare it
1297 with the reference that still points to the proper type. */
1298 else if (!gimple_check_call_matching_types (e->call_stmt,
1299 ref->referred->decl,
1300 true))
1301 {
1302 if (dump_file)
1303 fprintf (dump_file, "Not expanding speculative call of %s -> %s\n"
1304 "Type mismatch.\n",
1305 e->caller->dump_name (),
1306 e->callee->dump_name ());
1307 e = e->resolve_speculation ();
1308 /* We are producing the final function body and will throw away the
1309 callgraph edges really soon. Reset the counts/frequencies to
1310 keep verifier happy in the case of roundoff errors. */
1311 e->count = gimple_bb (e->call_stmt)->count;
1312 e->frequency = compute_call_stmt_bb_frequency
1313 (e->caller->decl, gimple_bb (e->call_stmt));
1314 }
1315 /* Expand speculation into GIMPLE code. */
1316 else
1317 {
1318 if (dump_file)
1319 {
1320 fprintf (dump_file,
1321 "Expanding speculative call of %s -> %s count: ",
1322 e->caller->dump_name (),
1323 e->callee->dump_name ());
1324 e->count.dump (dump_file);
1325 fprintf (dump_file, "\n");
1326 }
1327 gcc_assert (e2->speculative);
1328 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1329
1330 profile_probability prob = e->count.probability_in (e->count
1331 + e2->count);
1332 if (prob.initialized_p ())
1333 ;
1334 else if (e->frequency || e2->frequency)
1335 prob = profile_probability::probability_in_gcov_type
1336 (e->frequency, e->frequency + e2->frequency).guessed ();
1337 else
1338 prob = profile_probability::even ();
1339 new_stmt = gimple_ic (e->call_stmt,
1340 dyn_cast<cgraph_node *> (ref->referred),
1341 prob, e->count, e->count + e2->count);
1342 e->speculative = false;
1343 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1344 false);
1345
1346 /* Fix edges for BUILT_IN_CHKP_BNDRET calls attached to the
1347 processed call stmt. */
1348 if (gimple_call_with_bounds_p (new_stmt)
1349 && gimple_call_lhs (new_stmt)
1350 && chkp_retbnd_call_by_val (gimple_call_lhs (e2->call_stmt)))
1351 {
1352 tree dresult = gimple_call_lhs (new_stmt);
1353 tree iresult = gimple_call_lhs (e2->call_stmt);
1354 gcall *dbndret = chkp_retbnd_call_by_val (dresult);
1355 gcall *ibndret = chkp_retbnd_call_by_val (iresult);
1356 struct cgraph_edge *iedge
1357 = e2->caller->cgraph_node::get_edge (ibndret);
1358 struct cgraph_edge *dedge;
1359
1360 if (dbndret)
1361 {
1362 dedge = iedge->caller->create_edge (iedge->callee,
1363 dbndret, e->count,
1364 e->frequency);
1365 dedge->frequency = compute_call_stmt_bb_frequency
1366 (dedge->caller->decl, gimple_bb (dedge->call_stmt));
1367 }
1368 iedge->frequency = compute_call_stmt_bb_frequency
1369 (iedge->caller->decl, gimple_bb (iedge->call_stmt));
1370 }
1371
1372 e->frequency = compute_call_stmt_bb_frequency
1373 (e->caller->decl, gimple_bb (e->call_stmt));
1374 e2->frequency = compute_call_stmt_bb_frequency
1375 (e2->caller->decl, gimple_bb (e2->call_stmt));
1376 e2->speculative = false;
1377 ref->speculative = false;
1378 ref->stmt = NULL;
 1379       /* Only the direct edge was in the call site hash; update the entry
 1380          so it points to the remaining indirect edge.  */
1381 if (e->caller->call_site_hash)
1382 cgraph_update_edge_in_call_site_hash (e2);
1383 pop_cfun ();
1384 /* Continue redirecting E to proper target. */
1385 }
1386 }
1387
 1388   /* We might propagate an instrumented function pointer into a
 1389      non-instrumented function and vice versa.  In such a
 1390      case we need to either fix the function declaration or
 1391      remove bounds from the call statement.  */
1392 if (flag_check_pointer_bounds && e->callee)
1393 skip_bounds = chkp_redirect_edge (e);
1394
1395 if (e->indirect_unknown_callee
1396 || (decl == e->callee->decl
1397 && !skip_bounds))
1398 return e->call_stmt;
1399
1400 if (flag_checking && decl)
1401 {
1402 cgraph_node *node = cgraph_node::get (decl);
1403 gcc_assert (!node || !node->clone.combined_args_to_skip);
1404 }
1405
1406 if (symtab->dump_file)
1407 {
1408 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1409 e->caller->dump_name (), e->callee->dump_name ());
1410 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1411 if (e->callee->clone.combined_args_to_skip)
1412 {
1413 fprintf (symtab->dump_file, " combined args to skip: ");
1414 dump_bitmap (symtab->dump_file,
1415 e->callee->clone.combined_args_to_skip);
1416 }
1417 }
1418
1419 if (e->callee->clone.combined_args_to_skip
1420 || skip_bounds)
1421 {
1422 int lp_nr;
1423
1424 new_stmt = e->call_stmt;
1425 if (e->callee->clone.combined_args_to_skip)
1426 new_stmt
1427 = gimple_call_copy_skip_args (new_stmt,
1428 e->callee->clone.combined_args_to_skip);
1429 if (skip_bounds)
1430 new_stmt = chkp_copy_call_skip_bounds (new_stmt);
1431
1432 tree old_fntype = gimple_call_fntype (e->call_stmt);
1433 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1434 cgraph_node *origin = e->callee;
1435 while (origin->clone_of)
1436 origin = origin->clone_of;
1437
1438 if ((origin->former_clone_of
1439 && old_fntype == TREE_TYPE (origin->former_clone_of))
1440 || old_fntype == TREE_TYPE (origin->decl))
1441 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1442 else
1443 {
1444 bitmap skip = e->callee->clone.combined_args_to_skip;
1445 tree t = cgraph_build_function_type_skip_args (old_fntype, skip,
1446 false);
1447 gimple_call_set_fntype (new_stmt, t);
1448 }
1449
1450 if (gimple_vdef (new_stmt)
1451 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1452 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1453
1454 gsi = gsi_for_stmt (e->call_stmt);
1455
1456 /* For optimized away parameters, add on the caller side
1457 before the call
1458 DEBUG D#X => parm_Y(D)
1459 stmts and associate D#X with parm in decl_debug_args_lookup
1460 vector to say for debug info that if parameter parm had been passed,
1461 it would have value parm_Y(D). */
1462 if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_STMTS)
1463 {
1464 vec<tree, va_gc> **debug_args
1465 = decl_debug_args_lookup (e->callee->decl);
1466 tree old_decl = gimple_call_fndecl (e->call_stmt);
1467 if (debug_args && old_decl)
1468 {
1469 tree parm;
1470 unsigned i = 0, num;
1471 unsigned len = vec_safe_length (*debug_args);
1472 unsigned nargs = gimple_call_num_args (e->call_stmt);
1473 for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1474 parm && num < nargs;
1475 parm = DECL_CHAIN (parm), num++)
1476 if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1477 && is_gimple_reg (parm))
1478 {
1479 unsigned last = i;
1480
1481 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1482 i += 2;
1483 if (i >= len)
1484 {
1485 i = 0;
1486 while (i < last
1487 && (**debug_args)[i] != DECL_ORIGIN (parm))
1488 i += 2;
1489 if (i >= last)
1490 continue;
1491 }
1492 tree ddecl = (**debug_args)[i + 1];
1493 tree arg = gimple_call_arg (e->call_stmt, num);
1494 if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1495 TREE_TYPE (arg)))
1496 {
1497 tree rhs1;
1498 if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1499 continue;
1500 if (TREE_CODE (arg) == SSA_NAME
1501 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1502 && (rhs1
1503 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1504 && useless_type_conversion_p (TREE_TYPE (ddecl),
1505 TREE_TYPE (rhs1)))
1506 arg = rhs1;
1507 else
1508 arg = fold_convert (TREE_TYPE (ddecl), arg);
1509 }
1510
1511 gimple *def_temp
1512 = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1513 e->call_stmt);
1514 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1515 }
1516 }
1517 }
1518
1519 gsi_replace (&gsi, new_stmt, false);
1520 /* We need to defer cleaning EH info on the new statement to
1521 fixup-cfg. We may not have dominator information at this point
1522 and thus would end up with unreachable blocks and have no way
1523 to communicate that we need to run CFG cleanup then. */
1524 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1525 if (lp_nr != 0)
1526 {
1527 remove_stmt_from_eh_lp (e->call_stmt);
1528 add_stmt_to_eh_lp (new_stmt, lp_nr);
1529 }
1530 }
1531 else
1532 {
1533 new_stmt = e->call_stmt;
1534 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1535 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1536 }
1537
1538 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1539 adjust gimple_call_fntype too. */
1540 if (gimple_call_noreturn_p (new_stmt)
1541 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1542 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1543 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1544 == void_type_node))
1545 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1546
1547 /* If the call becomes noreturn, remove the LHS if possible. */
1548 tree lhs = gimple_call_lhs (new_stmt);
1549 if (lhs
1550 && gimple_call_noreturn_p (new_stmt)
1551 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1552 || should_remove_lhs_p (lhs)))
1553 {
1554 if (TREE_CODE (lhs) == SSA_NAME)
1555 {
1556 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1557 TREE_TYPE (lhs), NULL);
1558 var = get_or_create_ssa_default_def
1559 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1560 gimple *set_stmt = gimple_build_assign (lhs, var);
1561 gsi = gsi_for_stmt (new_stmt);
1562 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1563 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1564 }
1565 gimple_call_set_lhs (new_stmt, NULL_TREE);
1566 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1567 }
1568
1569 /* If new callee has no static chain, remove it. */
1570 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1571 {
1572 gimple_call_set_chain (new_stmt, NULL);
1573 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1574 }
1575
1576 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1577 new_stmt);
1578
1579 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1580
1581 if (symtab->dump_file)
1582 {
1583 fprintf (symtab->dump_file, " updated to:");
1584 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1585 }
1586 return new_stmt;
1587 }
1588
1589 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1590 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1591 of OLD_STMT if it was previously call statement.
1592 If NEW_STMT is NULL, the call has been dropped without any
1593 replacement. */
1594
1595 static void
1596 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1597 gimple *old_stmt, tree old_call,
1598 gimple *new_stmt)
1599 {
1600 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1601 ? gimple_call_fndecl (new_stmt) : 0;
1602
 1603   /* If we are seeing indirect calls, then there is nothing to update.  */
1604 if (!new_call && !old_call)
1605 return;
 1606   /* See if we turned an indirect call into a direct call or folded a call to
 1607      one builtin into a call to a different builtin.  */
1608 if (old_call != new_call)
1609 {
1610 cgraph_edge *e = node->get_edge (old_stmt);
1611 cgraph_edge *ne = NULL;
1612 profile_count count;
1613 int frequency;
1614
1615 if (e)
1616 {
1617 /* Keep calls marked as dead dead. */
1618 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1619 && DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
1620 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
1621 {
1622 node->get_edge (old_stmt)->set_call_stmt
1623 (as_a <gcall *> (new_stmt));
1624 return;
1625 }
1626 /* See if the edge is already there and has the correct callee. It
 1627          might be so because indirect inlining has already updated
1628 it. We also might've cloned and redirected the edge. */
1629 if (new_call && e->callee)
1630 {
1631 cgraph_node *callee = e->callee;
1632 while (callee)
1633 {
1634 if (callee->decl == new_call
1635 || callee->former_clone_of == new_call)
1636 {
1637 e->set_call_stmt (as_a <gcall *> (new_stmt));
1638 return;
1639 }
1640 callee = callee->clone_of;
1641 }
1642 }
1643
 1644       /* Otherwise remove the edge and create a new one; we can't simply redirect
 1645          it since the function has changed, so the inline plan and other information
 1646          attached to the edge are invalid.  */
1647 count = e->count;
1648 frequency = e->frequency;
1649 if (e->indirect_unknown_callee || e->inline_failed)
1650 e->remove ();
1651 else
1652 e->callee->remove_symbol_and_inline_clones ();
1653 }
1654 else if (new_call)
1655 {
1656 /* We are seeing new direct call; compute profile info based on BB. */
1657 basic_block bb = gimple_bb (new_stmt);
1658 count = bb->count;
1659 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1660 bb);
1661 }
1662
1663 if (new_call)
1664 {
1665 ne = node->create_edge (cgraph_node::get_create (new_call),
1666 as_a <gcall *> (new_stmt), count,
1667 frequency);
1668 gcc_assert (ne->inline_failed);
1669 }
1670 }
 1671   /* We only updated the call stmt; update the pointer in the cgraph edge.  */
1672 else if (old_stmt != new_stmt)
1673 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1674 }
1675
1676 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1677 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1678 of OLD_STMT before it was updated (updating can happen inplace). */
1679
1680 void
1681 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1682 gimple *new_stmt)
1683 {
1684 cgraph_node *orig = cgraph_node::get (cfun->decl);
1685 cgraph_node *node;
1686
1687 gcc_checking_assert (orig);
1688 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1689 if (orig->clones)
1690 for (node = orig->clones; node != orig;)
1691 {
1692 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1693 if (node->clones)
1694 node = node->clones;
1695 else if (node->next_sibling_clone)
1696 node = node->next_sibling_clone;
1697 else
1698 {
1699 while (node != orig && !node->next_sibling_clone)
1700 node = node->clone_of;
1701 if (node != orig)
1702 node = node->next_sibling_clone;
1703 }
1704 }
1705 }
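
/* Sketch of the expected calling convention (assumed): passes that fold or
   replace a call statement in the current function invoke, with the decl the
   old statement used to call,

     cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);

   so that the edges of cfun's node and of all its clones stay consistent.
   OLD_STMT, OLD_DECL and NEW_STMT are hypothetical variables.  */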
1706
1707
1708 /* Remove all callees from the node. */
1709
1710 void
1711 cgraph_node::remove_callees (void)
1712 {
1713 cgraph_edge *e, *f;
1714
1715 /* It is sufficient to remove the edges from the lists of callers of
1716 the callees. The callee list of the node can be zapped with one
1717 assignment. */
1718 for (e = callees; e; e = f)
1719 {
1720 f = e->next_callee;
1721 symtab->call_edge_removal_hooks (e);
1722 if (!e->indirect_unknown_callee)
1723 e->remove_callee ();
1724 symtab->free_edge (e);
1725 }
1726 for (e = indirect_calls; e; e = f)
1727 {
1728 f = e->next_callee;
1729 symtab->call_edge_removal_hooks (e);
1730 if (!e->indirect_unknown_callee)
1731 e->remove_callee ();
1732 symtab->free_edge (e);
1733 }
1734 indirect_calls = NULL;
1735 callees = NULL;
1736 if (call_site_hash)
1737 {
1738 call_site_hash->empty ();
1739 call_site_hash = NULL;
1740 }
1741 }
1742
1743 /* Remove all callers from the node. */
1744
1745 void
1746 cgraph_node::remove_callers (void)
1747 {
1748 cgraph_edge *e, *f;
1749
1750 /* It is sufficient to remove the edges from the lists of callees of
1751 the callers. The caller list of the node can be zapped with one
1752 assignment. */
1753 for (e = callers; e; e = f)
1754 {
1755 f = e->next_caller;
1756 symtab->call_edge_removal_hooks (e);
1757 e->remove_caller ();
1758 symtab->free_edge (e);
1759 }
1760 callers = NULL;
1761 }
1762
1763 /* Helper function for cgraph_release_function_body and free_lang_data.
1764 It releases body from function DECL without having to inspect its
1765 possibly non-existent symtab node. */
1766
1767 void
1768 release_function_body (tree decl)
1769 {
1770 function *fn = DECL_STRUCT_FUNCTION (decl);
1771 if (fn)
1772 {
1773 if (fn->cfg
1774 && loops_for_fn (fn))
1775 {
1776 fn->curr_properties &= ~PROP_loops;
1777 loop_optimizer_finalize (fn);
1778 }
1779 if (fn->gimple_df)
1780 {
1781 delete_tree_ssa (fn);
1782 fn->eh = NULL;
1783 }
1784 if (fn->cfg)
1785 {
1786 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1787 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1788 delete_tree_cfg_annotations (fn);
1789 clear_edges (fn);
1790 fn->cfg = NULL;
1791 }
1792 if (fn->value_histograms)
1793 free_histograms (fn);
1794 gimple_set_body (decl, NULL);
 1795       /* The struct function hangs a lot of data off it that would leak if we
 1796          didn't remove all pointers to it.  */
1797 ggc_free (fn);
1798 DECL_STRUCT_FUNCTION (decl) = NULL;
1799 }
1800 DECL_SAVED_TREE (decl) = NULL;
1801 }
1802
1803 /* Release memory used to represent body of function.
1804 Use this only for functions that are released before being translated to
1805 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1806 are free'd in final.c via free_after_compilation().
 1807    KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk.  */
1808
1809 void
1810 cgraph_node::release_body (bool keep_arguments)
1811 {
1812 ipa_transforms_to_apply.release ();
1813 if (!used_as_abstract_origin && symtab->state != PARSING)
1814 {
1815 DECL_RESULT (decl) = NULL;
1816
1817 if (!keep_arguments)
1818 DECL_ARGUMENTS (decl) = NULL;
1819 }
1820 /* If the node is abstract and needed, then do not clear
1821 DECL_INITIAL of its associated function declaration because it's
1822 needed to emit debug info later. */
1823 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1824 DECL_INITIAL (decl) = error_mark_node;
1825 release_function_body (decl);
1826 if (lto_file_data)
1827 {
1828 lto_free_function_in_decl_state_for_node (this);
1829 lto_file_data = NULL;
1830 }
1831 }
1832
1833 /* Remove function from symbol table. */
1834
1835 void
1836 cgraph_node::remove (void)
1837 {
1838 cgraph_node *n;
1839 int uid = this->uid;
1840
1841 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1842 fprintf (symtab->ipa_clones_dump_file,
1843 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1844 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1845 DECL_SOURCE_COLUMN (decl));
1846
1847 symtab->call_cgraph_removal_hooks (this);
1848 remove_callers ();
1849 remove_callees ();
1850 ipa_transforms_to_apply.release ();
1851 delete_function_version (function_version ());
1852
 1853   /* Incremental inlining accesses removed nodes stored in the postorder list.
1854 */
1855 force_output = false;
1856 forced_by_abi = false;
1857 for (n = nested; n; n = n->next_nested)
1858 n->origin = NULL;
1859 nested = NULL;
1860 if (origin)
1861 {
1862 cgraph_node **node2 = &origin->nested;
1863
1864 while (*node2 != this)
1865 node2 = &(*node2)->next_nested;
1866 *node2 = next_nested;
1867 }
1868 unregister ();
1869 if (prev_sibling_clone)
1870 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1871 else if (clone_of)
1872 clone_of->clones = next_sibling_clone;
1873 if (next_sibling_clone)
1874 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1875 if (clones)
1876 {
1877 cgraph_node *n, *next;
1878
1879 if (clone_of)
1880 {
1881 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1882 n->clone_of = clone_of;
1883 n->clone_of = clone_of;
1884 n->next_sibling_clone = clone_of->clones;
1885 if (clone_of->clones)
1886 clone_of->clones->prev_sibling_clone = n;
1887 clone_of->clones = clones;
1888 }
1889 else
1890 {
1891 /* We are removing a node with clones. This makes the clones inconsistent,
1892 but assume they will be removed subsequently and just keep the clone
1893 tree intact. This can happen in unreachable function removal since
1894 we remove unreachable functions in random order, not by a bottom-up
1895 walk of clone trees. */
1896 for (n = clones; n; n = next)
1897 {
1898 next = n->next_sibling_clone;
1899 n->next_sibling_clone = NULL;
1900 n->prev_sibling_clone = NULL;
1901 n->clone_of = NULL;
1902 }
1903 }
1904 }
1905
1906 /* While all the clones are removed after being processed, the function
1907 itself is kept in the cgraph even after it is compiled. Check whether
1908 we are done with this body and reclaim it proactively if this is the
1909 case. */
1910 if (symtab->state != LTO_STREAMING)
1911 {
1912 n = cgraph_node::get (decl);
1913 if (!n
1914 || (!n->clones && !n->clone_of && !n->global.inlined_to
1915 && ((symtab->global_info_ready || in_lto_p)
1916 && (TREE_ASM_WRITTEN (n->decl)
1917 || DECL_EXTERNAL (n->decl)
1918 || !n->analyzed
1919 || (!flag_wpa && n->in_other_partition)))))
1920 release_body ();
1921 }
1922 else
1923 {
1924 lto_free_function_in_decl_state_for_node (this);
1925 lto_file_data = NULL;
1926 }
1927
1928 decl = NULL;
1929 if (call_site_hash)
1930 {
1931 call_site_hash->empty ();
1932 call_site_hash = NULL;
1933 }
1934
1935 if (instrumented_version)
1936 {
1937 instrumented_version->instrumented_version = NULL;
1938 instrumented_version = NULL;
1939 }
1940
1941 symtab->release_symbol (this, uid);
1942 }
1943
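/* A minimal usage sketch (hypothetical pass code, not part of this file):
   a pass that has proven FNDECL unreachable could drop its node like this,
   letting remove () take care of edges, clones and the body:

     cgraph_node *node = cgraph_node::get (fndecl);
     if (node && !node->global.inlined_to)
       node->remove ();  */
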
1944 /* Indicate that the node has its address taken. */
1945
1946 void
1947 cgraph_node::mark_address_taken (void)
1948 {
1949 /* Indirect inlining can figure out that all uses of the address are
1950 inlined. */
1951 if (global.inlined_to)
1952 {
1953 gcc_assert (cfun->after_inlining);
1954 gcc_assert (callers->indirect_inlining_edge);
1955 return;
1956 }
1957 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1958 IPA_REF_ADDR reference exists (and thus it should be set on node
1959 representing alias we take address of) and as a test whether address
1960 of the object was taken (and thus it should be set on node alias is
1961 referring to). We should remove the first use and then remove the
1962 following set. */
1963 address_taken = 1;
1964 cgraph_node *node = ultimate_alias_target ();
1965 node->address_taken = 1;
1966 }
1967
1968 /* Return local info for the compiled function. */
1969
1970 cgraph_local_info *
1971 cgraph_node::local_info (tree decl)
1972 {
1973 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1974 cgraph_node *node = get (decl);
1975 if (!node)
1976 return NULL;
1977 return &node->ultimate_alias_target ()->local;
1978 }
1979
1980 /* Return RTL info for the compiled function. */
1981
1982 cgraph_rtl_info *
1983 cgraph_node::rtl_info (tree decl)
1984 {
1985 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1986 cgraph_node *node = get (decl);
1987 if (!node)
1988 return NULL;
1989 enum availability avail;
1990 node = node->ultimate_alias_target (&avail);
1991 if (decl != current_function_decl
1992 && (avail < AVAIL_AVAILABLE
1993 || (node->decl != current_function_decl
1994 && !TREE_ASM_WRITTEN (node->decl))))
1995 return NULL;
1996 /* Allocate if it doesn't exist. */
1997 if (node->rtl == NULL)
1998 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1999 return node->rtl;
2000 }
2001
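/* Illustrative sketch (assumed caller code, not from this file): a query
   for the RTL info of the current function would typically look like

     cgraph_rtl_info *info = cgraph_node::rtl_info (current_function_decl);

   where a NULL result means the target body is not available, and a
   non-NULL result is allocated lazily as shown above.  */
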
2002 /* Return a string describing the failure REASON. */
2003
2004 const char*
2005 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
2006 {
2007 #undef DEFCIFCODE
2008 #define DEFCIFCODE(code, type, string) string,
2009
2010 static const char *cif_string_table[CIF_N_REASONS] = {
2011 #include "cif-code.def"
2012 };
2013
2014 /* Signedness of an enum type is implementation defined, so cast it
2015 to unsigned before testing. */
2016 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2017 return cif_string_table[reason];
2018 }
2019
2020 /* Return a type describing the failure REASON. */
2021
2022 cgraph_inline_failed_type_t
2023 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
2024 {
2025 #undef DEFCIFCODE
2026 #define DEFCIFCODE(code, type, string) type,
2027
2028 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
2029 #include "cif-code.def"
2030 };
2031
2032 /* Signedness of an enum type is implementation defined, so cast it
2033 to unsigned before testing. */
2034 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2035 return cif_type_table[reason];
2036 }
2037
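/* Usage sketch (hypothetical diagnostic code, not from this file; it
   assumes the CIF_FINAL_ERROR type from cif-code.def): the inliner reports
   why an edge was not inlined roughly as

     cgraph_inline_failed_t reason = e->inline_failed;
     if (cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
       error ("call cannot be inlined: %s",
              cgraph_inline_failed_string (reason));  */
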
2038 /* Names used to print out the availability enum. */
2039 const char * const cgraph_availability_names[] =
2040 {"unset", "not_available", "overwritable", "available", "local"};
2041
2042 /* Output flags of edge to a file F. */
2043
2044 void
2045 cgraph_edge::dump_edge_flags (FILE *f)
2046 {
2047 if (speculative)
2048 fprintf (f, "(speculative) ");
2049 if (!inline_failed)
2050 fprintf (f, "(inlined) ");
2051 if (call_stmt_cannot_inline_p)
2052 fprintf (f, "(call_stmt_cannot_inline_p) ");
2053 if (indirect_inlining_edge)
2054 fprintf (f, "(indirect_inlining) ");
2055 if (count.initialized_p ())
2056 {
2057 fprintf (f, "(");
2058 count.dump (f);
2059 fprintf (f, ")");
2060 }
2061 if (frequency)
2062 fprintf (f, "(%.2f per call) ", frequency / (double)CGRAPH_FREQ_BASE);
2063 if (can_throw_external)
2064 fprintf (f, "(can throw external) ");
2065 }
2066
2067 /* Dump call graph node to file F. */
2068
2069 void
2070 cgraph_node::dump (FILE *f)
2071 {
2072 cgraph_edge *edge;
2073
2074 dump_base (f);
2075
2076 if (global.inlined_to)
2077 fprintf (f, " Function %s is inline copy in %s\n",
2078 dump_name (),
2079 global.inlined_to->dump_name ());
2080 if (clone_of)
2081 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2082 if (symtab->function_flags_ready)
2083 fprintf (f, " Availability: %s\n",
2084 cgraph_availability_names [get_availability ()]);
2085
2086 if (profile_id)
2087 fprintf (f, " Profile id: %i\n",
2088 profile_id);
2089 fprintf (f, " First run: %i\n", tp_first_run);
2090 cgraph_function_version_info *vi = function_version ();
2091 if (vi != NULL)
2092 {
2093 fprintf (f, " Version info: ");
2094 if (vi->prev != NULL)
2095 {
2096 fprintf (f, "prev: ");
2097 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2098 }
2099 if (vi->next != NULL)
2100 {
2101 fprintf (f, "next: ");
2102 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2103 }
2104 if (vi->dispatcher_resolver != NULL_TREE)
2105 fprintf (f, "dispatcher: %s",
2106 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2107
2108 fprintf (f, "\n");
2109 }
2110 fprintf (f, " Function flags:");
2111 if (count.initialized_p ())
2112 {
2113 fprintf (f, " count: ");
2114 count.dump (f);
2115 }
2116 if (origin)
2117 fprintf (f, " nested in: %s", origin->asm_name ());
2118 if (gimple_has_body_p (decl))
2119 fprintf (f, " body");
2120 if (process)
2121 fprintf (f, " process");
2122 if (local.local)
2123 fprintf (f, " local");
2124 if (local.redefined_extern_inline)
2125 fprintf (f, " redefined_extern_inline");
2126 if (only_called_at_startup)
2127 fprintf (f, " only_called_at_startup");
2128 if (only_called_at_exit)
2129 fprintf (f, " only_called_at_exit");
2130 if (tm_clone)
2131 fprintf (f, " tm_clone");
2132 if (calls_comdat_local)
2133 fprintf (f, " calls_comdat_local");
2134 if (icf_merged)
2135 fprintf (f, " icf_merged");
2136 if (merged_comdat)
2137 fprintf (f, " merged_comdat");
2138 if (split_part)
2139 fprintf (f, " split_part");
2140 if (indirect_call_target)
2141 fprintf (f, " indirect_call_target");
2142 if (nonfreeing_fn)
2143 fprintf (f, " nonfreeing_fn");
2144 if (DECL_STATIC_CONSTRUCTOR (decl))
2145 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2146 if (DECL_STATIC_DESTRUCTOR (decl))
2147 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2148 if (frequency == NODE_FREQUENCY_HOT)
2149 fprintf (f, " hot");
2150 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2151 fprintf (f, " unlikely_executed");
2152 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2153 fprintf (f, " executed_once");
2154 if (only_called_at_startup)
2155 fprintf (f, " only_called_at_startup");
2156 if (only_called_at_exit)
2157 fprintf (f, " only_called_at_exit");
2158 if (opt_for_fn (decl, optimize_size))
2159 fprintf (f, " optimize_size");
2160 if (parallelized_function)
2161 fprintf (f, " parallelized_function");
2162
2163 fprintf (f, "\n");
2164
2165 if (thunk.thunk_p)
2166 {
2167 fprintf (f, " Thunk");
2168 if (thunk.alias)
2169 fprintf (f, " of %s (asm: %s)",
2170 lang_hooks.decl_printable_name (thunk.alias, 2),
2171 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2172 fprintf (f, " fixed offset %i virtual value %i has "
2173 "virtual offset %i)\n",
2174 (int)thunk.fixed_offset,
2175 (int)thunk.virtual_value,
2176 (int)thunk.virtual_offset_p);
2177 }
2178 if (alias && thunk.alias
2179 && DECL_P (thunk.alias))
2180 {
2181 fprintf (f, " Alias of %s",
2182 lang_hooks.decl_printable_name (thunk.alias, 2));
2183 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2184 fprintf (f, " (asm: %s)",
2185 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2186 fprintf (f, "\n");
2187 }
2188
2189 fprintf (f, " Called by: ");
2190
2191 profile_count sum = profile_count::zero ();
2192 for (edge = callers; edge; edge = edge->next_caller)
2193 {
2194 fprintf (f, "%s ", edge->caller->dump_name ());
2195 edge->dump_edge_flags (f);
2196 if (edge->count.initialized_p ())
2197 sum += edge->count;
2198 }
2199
2200 fprintf (f, "\n Calls: ");
2201 for (edge = callees; edge; edge = edge->next_callee)
2202 {
2203 fprintf (f, "%s ", edge->callee->dump_name ());
2204 edge->dump_edge_flags (f);
2205 }
2206 fprintf (f, "\n");
2207
2208 if (count.initialized_p ())
2209 {
2210 bool ok = true;
2211 bool min = false;
2212 ipa_ref *ref;
2213
2214 FOR_EACH_ALIAS (this, ref)
2215 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2216 sum += dyn_cast <cgraph_node *> (ref->referring)->count;
2217
2218 if (global.inlined_to
2219 || (symtab->state < EXPANSION
2220 && ultimate_alias_target () == this && only_called_directly_p ()))
2221 ok = !count.differs_from_p (sum);
2222 else if (count > profile_count::from_gcov_type (100)
2223 && count < sum.apply_scale (99, 100))
2224 ok = false, min = true;
2225 if (!ok)
2226 {
2227 fprintf (f, " Invalid sum of caller counts ");
2228 sum.dump (f);
2229 if (min)
2230 fprintf (f, ", should be at most ");
2231 else
2232 fprintf (f, ", should be ");
2233 count.dump (f);
2234 fprintf (f, "\n");
2235 }
2236 }
2237
2238 for (edge = indirect_calls; edge; edge = edge->next_callee)
2239 {
2240 if (edge->indirect_info->polymorphic)
2241 {
2242 fprintf (f, " Polymorphic indirect call of type ");
2243 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2244 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2245 }
2246 else
2247 fprintf (f, " Indirect call");
2248 edge->dump_edge_flags (f);
2249 if (edge->indirect_info->param_index != -1)
2250 {
2251 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2252 if (edge->indirect_info->agg_contents)
2253 fprintf (f, " loaded from %s %s at offset %i",
2254 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2255 edge->indirect_info->by_ref ? "passed by reference":"",
2256 (int)edge->indirect_info->offset);
2257 if (edge->indirect_info->vptr_changed)
2258 fprintf (f, " (vptr maybe changed)");
2259 }
2260 fprintf (f, "\n");
2261 if (edge->indirect_info->polymorphic)
2262 edge->indirect_info->context.dump (f);
2263 }
2264
2265 if (instrumentation_clone)
2266 fprintf (f, " Is instrumented version.\n");
2267 else if (instrumented_version)
2268 fprintf (f, " Has instrumented version.\n");
2269 }
2270
2271 /* Dump call graph node NODE to stderr. */
2272
2273 DEBUG_FUNCTION void
2274 cgraph_node::debug (void)
2275 {
2276 dump (stderr);
2277 }
2278
2279 /* Dump the callgraph to file F. */
2280
2281 void
2282 cgraph_node::dump_cgraph (FILE *f)
2283 {
2284 cgraph_node *node;
2285
2286 fprintf (f, "callgraph:\n\n");
2287 FOR_EACH_FUNCTION (node)
2288 node->dump (f);
2289 }
2290
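/* Debugging sketch: these dumpers are normally invoked from a debugger,
   e.g.

     (gdb) call node->debug ()
     (gdb) call cgraph_node::dump_cgraph (stderr)

   which print one node, or every function in the symbol table, in the
   format produced by dump () above.  */
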
2291 /* Return true when the DECL can possibly be inlined. */
2292
2293 bool
2294 cgraph_function_possibly_inlined_p (tree decl)
2295 {
2296 if (!symtab->global_info_ready)
2297 return !DECL_UNINLINABLE (decl);
2298 return DECL_POSSIBLY_INLINED (decl);
2299 }
2300
2301 /* cgraph_node is no longer nested function; update cgraph accordingly. */
2302 void
2303 cgraph_node::unnest (void)
2304 {
2305 gcc_assert (origin);
2306 cgraph_node **node2 = &origin->nested;
2307
2308 while (*node2 != this)
2309 node2 = &(*node2)->next_nested;
2310 *node2 = next_nested;
2311 origin = NULL;
2312 }
2313
2314 /* Return function availability. See cgraph.h for description of individual
2315 return values. */
2316 enum availability
2317 cgraph_node::get_availability (symtab_node *ref)
2318 {
2319 if (ref)
2320 {
2321 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2322 if (cref)
2323 ref = cref->global.inlined_to;
2324 }
2325 enum availability avail;
2326 if (!analyzed)
2327 avail = AVAIL_NOT_AVAILABLE;
2328 else if (local.local)
2329 avail = AVAIL_LOCAL;
2330 else if (global.inlined_to)
2331 avail = AVAIL_AVAILABLE;
2332 else if (transparent_alias)
2333 ultimate_alias_target (&avail, ref);
2334 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl))
2335 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2336 avail = AVAIL_INTERPOSABLE;
2337 else if (!externally_visible)
2338 avail = AVAIL_AVAILABLE;
2339 /* If this is a reference from symbol itself and there are no aliases, we
2340 may be sure that the symbol was not interposed by something else because
2341 the symbol itself would be unreachable otherwise.
2342
2343 Also comdat groups are always resolved in groups. */
2344 else if ((this == ref && !has_aliases_p ())
2345 || (ref && get_comdat_group ()
2346 && get_comdat_group () == ref->get_comdat_group ()))
2347 avail = AVAIL_AVAILABLE;
2348 /* Inline functions are safe to be analyzed even if their symbol can
2349 be overwritten at runtime. It is not meaningful to enforce any sane
2350 behavior on replacing an inline function by a different body. */
2351 else if (DECL_DECLARED_INLINE_P (decl))
2352 avail = AVAIL_AVAILABLE;
2353
2354 /* If the function can be overwritten, return OVERWRITABLE. Take
2355 care at least of two notable extensions - the COMDAT functions
2356 used to share template instantiations in C++ (this is symmetric
2357 to the code in cp_cannot_inline_tree_fn and probably should be shared,
2358 with the inlinability hooks completely eliminated). */
2359
2360 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2361 avail = AVAIL_INTERPOSABLE;
2362 else avail = AVAIL_AVAILABLE;
2363
2364 return avail;
2365 }
2366
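/* Usage sketch (hypothetical IPA code, not from this file): before trusting
   a callee's body during propagation one would typically check

     enum availability avail;
     cgraph_node *target = edge->callee->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       ... the body of TARGET can be analyzed safely ...

   since anything weaker may be replaced by a different definition at link
   or run time.  */
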
2367 /* Worker for cgraph_node_can_be_local_p. */
2368 static bool
2369 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2370 {
2371 return !(!node->force_output
2372 && ((DECL_COMDAT (node->decl)
2373 && !node->forced_by_abi
2374 && !node->used_from_object_file_p ()
2375 && !node->same_comdat_group)
2376 || !node->externally_visible));
2377 }
2378
2379 /* Return true if cgraph_node can be made local for API change.
2380 Extern inline functions and C++ COMDAT functions can be made local
2381 at the expense of possible code size growth if function is used in multiple
2382 compilation units. */
2383 bool
2384 cgraph_node::can_be_local_p (void)
2385 {
2386 return (!address_taken
2387 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2388 NULL, true));
2389 }
2390
2391 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2392 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2393 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2394 skipped. */
2395 bool
2396 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2397 (cgraph_node *, void *),
2398 void *data,
2399 bool include_overwritable,
2400 bool exclude_virtual_thunks)
2401 {
2402 cgraph_edge *e;
2403 ipa_ref *ref;
2404 enum availability avail = AVAIL_AVAILABLE;
2405
2406 if (include_overwritable
2407 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2408 {
2409 if (callback (this, data))
2410 return true;
2411 }
2412 FOR_EACH_ALIAS (this, ref)
2413 {
2414 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2415 if (include_overwritable
2416 || alias->get_availability () > AVAIL_INTERPOSABLE)
2417 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2418 include_overwritable,
2419 exclude_virtual_thunks))
2420 return true;
2421 }
2422 if (avail <= AVAIL_INTERPOSABLE)
2423 return false;
2424 for (e = callers; e; e = e->next_caller)
2425 if (e->caller->thunk.thunk_p
2426 && (include_overwritable
2427 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2428 && !(exclude_virtual_thunks
2429 && e->caller->thunk.virtual_offset_p))
2430 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2431 include_overwritable,
2432 exclude_virtual_thunks))
2433 return true;
2434
2435 return false;
2436 }
2437
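/* The callback convention matches the workers used elsewhere in this file:
   returning true from the callback stops the walk early. A minimal sketch
   of a caller (illustrative only):

     static bool
     address_taken_p_1 (cgraph_node *node, void *)
     {
       return node->address_taken;
     }

     ...
     if (node->call_for_symbol_thunks_and_aliases (address_taken_p_1,
                                                   NULL, true))
       ... the node, one of its aliases or one of its thunks has its
           address taken ...  */
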
2438 /* Worker to bring NODE local. */
2439
2440 bool
2441 cgraph_node::make_local (cgraph_node *node, void *)
2442 {
2443 gcc_checking_assert (node->can_be_local_p ());
2444 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2445 {
2446 node->make_decl_local ();
2447 node->set_section (NULL);
2448 node->set_comdat_group (NULL);
2449 node->externally_visible = false;
2450 node->forced_by_abi = false;
2451 node->local.local = true;
2452 node->set_section (NULL);
2453 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2454 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2455 && !flag_incremental_link);
2456 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2457 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2458 }
2459 return false;
2460 }
2461
2462 /* Bring cgraph node local. */
2463
2464 void
2465 cgraph_node::make_local (void)
2466 {
2467 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2468 }
2469
2470 /* Worker to set nothrow flag. */
2471
2472 static void
2473 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2474 bool *changed)
2475 {
2476 cgraph_edge *e;
2477
2478 if (nothrow && !TREE_NOTHROW (node->decl))
2479 {
2480 /* With non-call exceptions we can't say for sure that the other function
2481 body was not possibly optimized to still throw. */
2482 if (!non_call || node->binds_to_current_def_p ())
2483 {
2484 TREE_NOTHROW (node->decl) = true;
2485 *changed = true;
2486 for (e = node->callers; e; e = e->next_caller)
2487 e->can_throw_external = false;
2488 }
2489 }
2490 else if (!nothrow && TREE_NOTHROW (node->decl))
2491 {
2492 TREE_NOTHROW (node->decl) = false;
2493 *changed = true;
2494 }
2495 ipa_ref *ref;
2496 FOR_EACH_ALIAS (node, ref)
2497 {
2498 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2499 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2500 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2501 }
2502 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2503 if (e->caller->thunk.thunk_p
2504 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2505 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2506 }
2507
2508 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node,
2509 if any, to NOTHROW. Return true if any change was done. */
2510
2511 bool
2512 cgraph_node::set_nothrow_flag (bool nothrow)
2513 {
2514 bool changed = false;
2515 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2516
2517 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2518 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2519 else
2520 {
2521 ipa_ref *ref;
2522
2523 FOR_EACH_ALIAS (this, ref)
2524 {
2525 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2526 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2527 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2528 }
2529 }
2530 return changed;
2531 }
2532
2533 /* Worker to set malloc flag. */
2534 static void
2535 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2536 {
2537 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2538 {
2539 DECL_IS_MALLOC (node->decl) = true;
2540 *changed = true;
2541 }
2542
2543 ipa_ref *ref;
2544 FOR_EACH_ALIAS (node, ref)
2545 {
2546 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2547 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2548 set_malloc_flag_1 (alias, malloc_p, changed);
2549 }
2550
2551 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2552 if (e->caller->thunk.thunk_p
2553 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2554 set_malloc_flag_1 (e->caller, malloc_p, changed);
2555 }
2556
2557 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2558
2559 bool
2560 cgraph_node::set_malloc_flag (bool malloc_p)
2561 {
2562 bool changed = false;
2563
2564 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2565 set_malloc_flag_1 (this, malloc_p, &changed);
2566 else
2567 {
2568 ipa_ref *ref;
2569
2570 FOR_EACH_ALIAS (this, ref)
2571 {
2572 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2573 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2574 set_malloc_flag_1 (alias, malloc_p, &changed);
2575 }
2576 }
2577 return changed;
2578 }
2579
2580 /* Worker to set_const_flag. */
2581
2582 static void
2583 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2584 bool *changed)
2585 {
2586 /* Static constructors and destructors without a side effect can be
2587 optimized out. */
2588 if (set_const && !looping)
2589 {
2590 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2591 {
2592 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2593 *changed = true;
2594 }
2595 if (DECL_STATIC_DESTRUCTOR (node->decl))
2596 {
2597 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2598 *changed = true;
2599 }
2600 }
2601 if (!set_const)
2602 {
2603 if (TREE_READONLY (node->decl))
2604 {
2605 TREE_READONLY (node->decl) = 0;
2606 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2607 *changed = true;
2608 }
2609 }
2610 else
2611 {
2612 /* Consider function:
2613
2614 bool a(int *p)
2615 {
2616 return *p==*p;
2617 }
2618
2619 During early optimization we will turn this into:
2620
2621 bool a(int *p)
2622 {
2623 return true;
2624 }
2625
2626 Now this function may be detected as CONST, yet when interposed it
2627 may end up being just pure. We must always assume the worst
2628 scenario here. */
2629 if (TREE_READONLY (node->decl))
2630 {
2631 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2632 {
2633 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2634 *changed = true;
2635 }
2636 }
2637 else if (node->binds_to_current_def_p ())
2638 {
2639 TREE_READONLY (node->decl) = true;
2640 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2641 DECL_PURE_P (node->decl) = false;
2642 *changed = true;
2643 }
2644 else
2645 {
2646 if (dump_file && (dump_flags & TDF_DETAILS))
2647 fprintf (dump_file, "Dropping state to PURE because function does "
2648 "not bind to current def.\n");
2649 if (!DECL_PURE_P (node->decl))
2650 {
2651 DECL_PURE_P (node->decl) = true;
2652 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2653 *changed = true;
2654 }
2655 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2656 {
2657 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2658 *changed = true;
2659 }
2660 }
2661 }
2662
2663 ipa_ref *ref;
2664 FOR_EACH_ALIAS (node, ref)
2665 {
2666 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2667 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2668 set_const_flag_1 (alias, set_const, looping, changed);
2669 }
2670 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2671 if (e->caller->thunk.thunk_p
2672 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2673 {
2674 /* Virtual thunks access virtual offset in the vtable, so they can
2675 only be pure, never const. */
2676 if (set_const
2677 && (e->caller->thunk.virtual_offset_p
2678 || !node->binds_to_current_def_p (e->caller)))
2679 *changed |= e->caller->set_pure_flag (true, looping);
2680 else
2681 set_const_flag_1 (e->caller, set_const, looping, changed);
2682 }
2683 }
2684
2685 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2686 If SET_CONST is false, clear the flag.
2687
2688 When setting the flag be careful about possible interposition and
2689 do not set the flag for functions that can be interposed; only set the
2690 pure flag for functions that may bind to another definition.
2691
2692 Return true if any change was done. */
2693
2694 bool
2695 cgraph_node::set_const_flag (bool set_const, bool looping)
2696 {
2697 bool changed = false;
2698 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2699 set_const_flag_1 (this, set_const, looping, &changed);
2700 else
2701 {
2702 ipa_ref *ref;
2703
2704 FOR_EACH_ALIAS (this, ref)
2705 {
2706 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2707 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2708 set_const_flag_1 (alias, set_const, looping, &changed);
2709 }
2710 }
2711 return changed;
2712 }
2713
2714 /* Info used by set_pure_flag_1. */
2715
2716 struct set_pure_flag_info
2717 {
2718 bool pure;
2719 bool looping;
2720 bool changed;
2721 };
2722
2723 /* Worker to set_pure_flag. */
2724
2725 static bool
2726 set_pure_flag_1 (cgraph_node *node, void *data)
2727 {
2728 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2729 /* Static constructors and destructors without a side effect can be
2730 optimized out. */
2731 if (info->pure && !info->looping)
2732 {
2733 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2734 {
2735 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2736 info->changed = true;
2737 }
2738 if (DECL_STATIC_DESTRUCTOR (node->decl))
2739 {
2740 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2741 info->changed = true;
2742 }
2743 }
2744 if (info->pure)
2745 {
2746 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2747 {
2748 DECL_PURE_P (node->decl) = true;
2749 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2750 info->changed = true;
2751 }
2752 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2753 && !info->looping)
2754 {
2755 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2756 info->changed = true;
2757 }
2758 }
2759 else
2760 {
2761 if (DECL_PURE_P (node->decl))
2762 {
2763 DECL_PURE_P (node->decl) = false;
2764 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2765 info->changed = true;
2766 }
2767 }
2768 return false;
2769 }
2770
2771 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node,
2772 if any, to PURE.
2773
2774 When setting the flag, be careful about possible interposition.
2775 Return true if any change was done. */
2776
2777 bool
2778 cgraph_node::set_pure_flag (bool pure, bool looping)
2779 {
2780 struct set_pure_flag_info info = {pure, looping, false};
2781 if (!pure)
2782 looping = false;
2783 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2784 return info.changed;
2785 }
2786
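/* Sketch of how an IPA pass would use these setters (illustrative only;
   FUNCTION_IS_CONST, FUNCTION_IS_PURE and LOOPING are placeholders for the
   pass's own analysis results):

     if (function_is_const)
       changed |= node->set_const_flag (true, looping);
     else if (function_is_pure)
       changed |= node->set_pure_flag (true, looping);

   Both setters return true when any decl flag actually changed, so the
   caller knows whether anything was updated.  */
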
2787 /* Return true when cgraph_node cannot return or throw and thus
2788 it is safe to ignore its side effects for IPA analysis. */
2789
2790 bool
2791 cgraph_node::cannot_return_p (void)
2792 {
2793 int flags = flags_from_decl_or_type (decl);
2794 if (!opt_for_fn (decl, flag_exceptions))
2795 return (flags & ECF_NORETURN) != 0;
2796 else
2797 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2798 == (ECF_NORETURN | ECF_NOTHROW));
2799 }
2800
2801 /* Return true when a call of the edge cannot lead to return from the caller
2802 and thus it is safe to ignore its side effects for IPA analysis
2803 when computing side effects of the caller.
2804 FIXME: We could actually mark all edges that have no reaching
2805 path to the exit block or throw to get better results. */
2806 bool
2807 cgraph_edge::cannot_lead_to_return_p (void)
2808 {
2809 if (caller->cannot_return_p ())
2810 return true;
2811 if (indirect_unknown_callee)
2812 {
2813 int flags = indirect_info->ecf_flags;
2814 if (!opt_for_fn (caller->decl, flag_exceptions))
2815 return (flags & ECF_NORETURN) != 0;
2816 else
2817 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2818 == (ECF_NORETURN | ECF_NOTHROW));
2819 }
2820 else
2821 return callee->cannot_return_p ();
2822 }
2823
2824 /* Return true if the call can be hot. */
2825
2826 bool
2827 cgraph_edge::maybe_hot_p (void)
2828 {
2829 if (!maybe_hot_count_p (NULL, count))
2830 return false;
2831 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2832 || (callee
2833 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2834 return false;
2835 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2836 && (callee
2837 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2838 return false;
2839 if (opt_for_fn (caller->decl, optimize_size))
2840 return false;
2841 if (caller->frequency == NODE_FREQUENCY_HOT)
2842 return true;
2843 /* If the profile is not known yet, be conservative.
2844 FIXME: this predicate is used by the early inliner and can do better there. */
2845 if (symtab->state < IPA_SSA)
2846 return true;
2847 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2848 && frequency < CGRAPH_FREQ_BASE * 3 / 2)
2849 return false;
2850 if (opt_for_fn (caller->decl, flag_guess_branch_prob))
2851 {
2852 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2853 || frequency <= (CGRAPH_FREQ_BASE
2854 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
2855 return false;
2856 }
2857 return true;
2858 }
2859
2860 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2861
2862 static bool
2863 nonremovable_p (cgraph_node *node, void *)
2864 {
2865 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2866 }
2867
2868 /* Return true if whole comdat group can be removed if there are no direct
2869 calls to THIS. */
2870
2871 bool
2872 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2873 {
2874 struct ipa_ref *ref;
2875
2876 /* For local symbols or non-comdat group it is the same as
2877 can_remove_if_no_direct_calls_p. */
2878 if (!externally_visible || !same_comdat_group)
2879 {
2880 if (DECL_EXTERNAL (decl))
2881 return true;
2882 if (address_taken)
2883 return false;
2884 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2885 }
2886
2887 if (will_inline && address_taken)
2888 return false;
2889
2890 /* Otherwise check if we can remove the symbol itself and then verify
2891 that the only uses of the comdat group are direct calls to THIS
2892 or its aliases. */
2893 if (!can_remove_if_no_direct_calls_and_refs_p ())
2894 return false;
2895
2896 /* Check that all refs come from within the comdat group. */
2897 for (int i = 0; iterate_referring (i, ref); i++)
2898 if (ref->referring->get_comdat_group () != get_comdat_group ())
2899 return false;
2900
2901 struct cgraph_node *target = ultimate_alias_target ();
2902 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2903 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2904 {
2905 if (!externally_visible)
2906 continue;
2907 if (!next->alias
2908 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2909 return false;
2910
2911 /* If we see different symbol than THIS, be sure to check calls. */
2912 if (next->ultimate_alias_target () != target)
2913 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2914 if (e->caller->get_comdat_group () != get_comdat_group ()
2915 || will_inline)
2916 return false;
2917
2918 /* If function is not being inlined, we care only about
2919 references outside of the comdat group. */
2920 if (!will_inline)
2921 for (int i = 0; next->iterate_referring (i, ref); i++)
2922 if (ref->referring->get_comdat_group () != get_comdat_group ())
2923 return false;
2924 }
2925 return true;
2926 }
2927
2928 /* Return true when function cgraph_node can be expected to be removed
2929 from program when direct calls in this compilation unit are removed.
2930
2931 As a special case COMDAT functions are
2932 cgraph_can_remove_if_no_direct_calls_p while they are not
2933 cgraph_only_called_directly_p (it is possible they are called from another
2934 unit).
2935
2936 This function behaves as cgraph_only_called_directly_p because eliminating
2937 all uses of a COMDAT function does not necessarily make it disappear from
2938 the program unless we are compiling the whole program or we do LTO. In that
2939 case we know we win since dynamic linking will not really discard the
2940 linkonce section. */
2941
2942 bool
2943 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2944 (bool will_inline)
2945 {
2946 gcc_assert (!global.inlined_to);
2947 if (DECL_EXTERNAL (decl))
2948 return true;
2949
2950 if (!in_lto_p && !flag_whole_program)
2951 {
2952 /* If the symbol is in comdat group, we need to verify that whole comdat
2953 group becomes unreachable. Technically we could skip references from
2954 within the group, too. */
2955 if (!only_called_directly_p ())
2956 return false;
2957 if (same_comdat_group && externally_visible)
2958 {
2959 struct cgraph_node *target = ultimate_alias_target ();
2960
2961 if (will_inline && address_taken)
2962 return true;
2963 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2964 next != this;
2965 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2966 {
2967 if (!externally_visible)
2968 continue;
2969 if (!next->alias
2970 && !next->only_called_directly_p ())
2971 return false;
2972
2973 /* If we see different symbol than THIS,
2974 be sure to check calls. */
2975 if (next->ultimate_alias_target () != target)
2976 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2977 if (e->caller->get_comdat_group () != get_comdat_group ()
2978 || will_inline)
2979 return false;
2980 }
2981 }
2982 return true;
2983 }
2984 else
2985 return can_remove_if_no_direct_calls_p (will_inline);
2986 }
2987
2988
2989 /* Worker for cgraph_only_called_directly_p. */
2990
2991 static bool
2992 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2993 {
2994 return !node->only_called_directly_or_aliased_p ();
2995 }
2996
2997 /* Return true when function cgraph_node and all its aliases are only called
2998 directly.
2999 i.e. it is not externally visible, address was not taken and
3000 it is not used in any other non-standard way. */
3001
3002 bool
3003 cgraph_node::only_called_directly_p (void)
3004 {
3005 gcc_assert (ultimate_alias_target () == this);
3006 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
3007 NULL, true);
3008 }
3009
3010
3011 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3012
3013 static bool
3014 collect_callers_of_node_1 (cgraph_node *node, void *data)
3015 {
3016 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
3017 cgraph_edge *cs;
3018 enum availability avail;
3019 node->ultimate_alias_target (&avail);
3020
3021 if (avail > AVAIL_INTERPOSABLE)
3022 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3023 if (!cs->indirect_inlining_edge
3024 && !cs->caller->thunk.thunk_p)
3025 redirect_callers->safe_push (cs);
3026 return false;
3027 }
3028
3029 /* Collect all callers of cgraph_node and its aliases that are known to lead to
3030 cgraph_node (i.e. are not overwritable). */
3031
3032 vec<cgraph_edge *>
3033 cgraph_node::collect_callers (void)
3034 {
3035 vec<cgraph_edge *> redirect_callers = vNULL;
3036 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
3037 &redirect_callers, false);
3038 return redirect_callers;
3039 }
3040
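/* Usage sketch (hypothetical clone-creating code, not from this file):

     vec<cgraph_edge *> callers = node->collect_callers ();
     ... create a clone and redirect each edge in CALLERS to it ...
     callers.release ();

   Only non-thunk callers that are known to reach NODE (no interposable
   aliases, no indirect-inlining edges) are collected, as implemented
   above.  */
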
3041 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
3042
3043 static bool
3044 clone_of_p (cgraph_node *node, cgraph_node *node2)
3045 {
3046 bool skipped_thunk = false;
3047 node = node->ultimate_alias_target ();
3048 node2 = node2->ultimate_alias_target ();
3049
3050 /* There are no virtual clones of thunks so check former_clone_of or if we
3051 might have skipped thunks because these adjustments are no longer
3052 necessary. */
3053 while (node->thunk.thunk_p)
3054 {
3055 if (node2->former_clone_of == node->decl)
3056 return true;
3057 if (!node->thunk.this_adjusting)
3058 return false;
3059 node = node->callees->callee->ultimate_alias_target ();
3060 skipped_thunk = true;
3061 }
3062
3063 if (skipped_thunk)
3064 {
3065 if (!node2->clone.args_to_skip
3066 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
3067 return false;
3068 if (node2->former_clone_of == node->decl)
3069 return true;
3070 else if (!node2->clone_of)
3071 return false;
3072 }
3073
3074 while (node != node2 && node2)
3075 node2 = node2->clone_of;
3076 return node2 != NULL;
3077 }
3078
3079 /* Verify edge count and frequency. */
3080
3081 bool
3082 cgraph_edge::verify_count_and_frequency ()
3083 {
3084 bool error_found = false;
3085 if (count < 0)
3086 {
3087 error ("caller edge count is negative");
3088 error_found = true;
3089 }
3090 if (frequency < 0)
3091 {
3092 error ("caller edge frequency is negative");
3093 error_found = true;
3094 }
3095 if (frequency > CGRAPH_FREQ_MAX)
3096 {
3097 error ("caller edge frequency is too large");
3098 error_found = true;
3099 }
3100 return error_found;
3101 }
3102
3103 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3104 static void
3105 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3106 {
3107 bool fndecl_was_null = false;
3108 /* debug_gimple_stmt needs correct cfun */
3109 if (cfun != this_cfun)
3110 set_cfun (this_cfun);
3111 /* ...and an actual current_function_decl */
3112 if (!current_function_decl)
3113 {
3114 current_function_decl = this_cfun->decl;
3115 fndecl_was_null = true;
3116 }
3117 debug_gimple_stmt (stmt);
3118 if (fndecl_was_null)
3119 current_function_decl = NULL;
3120 }
3121
3122 /* Verify that call graph edge corresponds to DECL from the associated
3123 statement. Return true if the verification should fail. */
3124
3125 bool
3126 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3127 {
3128 cgraph_node *node;
3129
3130 if (!decl || callee->global.inlined_to)
3131 return false;
3132 if (symtab->state == LTO_STREAMING)
3133 return false;
3134 node = cgraph_node::get (decl);
3135
3136 /* We do not know if a node from a different partition is an alias or what it
3137 aliases and therefore cannot do the former_clone_of check reliably. When
3138 body_removed is set, we have lost all information about what it was an
3139 alias or thunk of and also cannot proceed. */
3140 if (!node
3141 || node->body_removed
3142 || node->in_other_partition
3143 || callee->icf_merged
3144 || callee->in_other_partition)
3145 return false;
3146
3147 node = node->ultimate_alias_target ();
3148
3149 /* Optimizers can redirect unreachable calls or calls triggering undefined
3150 behavior to builtin_unreachable. */
3151 if (DECL_BUILT_IN_CLASS (callee->decl) == BUILT_IN_NORMAL
3152 && DECL_FUNCTION_CODE (callee->decl) == BUILT_IN_UNREACHABLE)
3153 return false;
3154
3155 if (callee->former_clone_of != node->decl
3156 && (node != callee->ultimate_alias_target ())
3157 && !clone_of_p (node, callee))
3158 return true;
3159 else
3160 return false;
3161 }
3162
3163 /* Verify cgraph nodes of given cgraph node. */
3164 DEBUG_FUNCTION void
3165 cgraph_node::verify_node (void)
3166 {
3167 cgraph_edge *e;
3168 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3169 basic_block this_block;
3170 gimple_stmt_iterator gsi;
3171 bool error_found = false;
3172
3173 if (seen_error ())
3174 return;
3175
3176 timevar_push (TV_CGRAPH_VERIFY);
3177 error_found |= verify_base ();
3178 for (e = callees; e; e = e->next_callee)
3179 if (e->aux)
3180 {
3181 error ("aux field set for edge %s->%s",
3182 identifier_to_locale (e->caller->name ()),
3183 identifier_to_locale (e->callee->name ()));
3184 error_found = true;
3185 }
3186 if (count < 0)
3187 {
3188 error ("execution count is negative");
3189 error_found = true;
3190 }
3191 if (global.inlined_to && same_comdat_group)
3192 {
3193 error ("inline clone in same comdat group list");
3194 error_found = true;
3195 }
3196 if (!definition && !in_other_partition && local.local)
3197 {
3198 error ("local symbols must be defined");
3199 error_found = true;
3200 }
3201 if (global.inlined_to && externally_visible)
3202 {
3203 error ("externally visible inline clone");
3204 error_found = true;
3205 }
3206 if (global.inlined_to && address_taken)
3207 {
3208 error ("inline clone with address taken");
3209 error_found = true;
3210 }
3211 if (global.inlined_to && force_output)
3212 {
3213 error ("inline clone is forced to output");
3214 error_found = true;
3215 }
3216 for (e = indirect_calls; e; e = e->next_callee)
3217 {
3218 if (e->aux)
3219 {
3220 error ("aux field set for indirect edge from %s",
3221 identifier_to_locale (e->caller->name ()));
3222 error_found = true;
3223 }
3224 if (!e->indirect_unknown_callee
3225 || !e->indirect_info)
3226 {
3227 error ("An indirect edge from %s is not marked as indirect or has "
3228 "associated indirect_info, the corresponding statement is: ",
3229 identifier_to_locale (e->caller->name ()));
3230 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3231 error_found = true;
3232 }
3233 }
3234 bool check_comdat = comdat_local_p ();
3235 for (e = callers; e; e = e->next_caller)
3236 {
3237 if (e->verify_count_and_frequency ())
3238 error_found = true;
3239 if (check_comdat
3240 && !in_same_comdat_group_p (e->caller))
3241 {
3242 error ("comdat-local function called by %s outside its comdat",
3243 identifier_to_locale (e->caller->name ()));
3244 error_found = true;
3245 }
3246 if (!e->inline_failed)
3247 {
3248 if (global.inlined_to
3249 != (e->caller->global.inlined_to
3250 ? e->caller->global.inlined_to : e->caller))
3251 {
3252 error ("inlined_to pointer is wrong");
3253 error_found = true;
3254 }
3255 if (callers->next_caller)
3256 {
3257 error ("multiple inline callers");
3258 error_found = true;
3259 }
3260 }
3261 else
3262 if (global.inlined_to)
3263 {
3264 error ("inlined_to pointer set for noninline callers");
3265 error_found = true;
3266 }
3267 }
3268 for (e = callees; e; e = e->next_callee)
3269 {
3270 if (e->verify_count_and_frequency ())
3271 error_found = true;
3272 if (gimple_has_body_p (e->caller->decl)
3273 && !e->caller->global.inlined_to
3274 && !e->speculative
3275 /* Optimized out calls are redirected to __builtin_unreachable. */
3276 && (e->frequency
3277 || ! e->callee->decl
3278 || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
3279 || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
3280 && (e->frequency
3281 != compute_call_stmt_bb_frequency (e->caller->decl,
3282 gimple_bb (e->call_stmt))))
3283 {
3284 error ("caller edge frequency %i does not match BB frequency %i",
3285 e->frequency,
3286 compute_call_stmt_bb_frequency (e->caller->decl,
3287 gimple_bb (e->call_stmt)));
3288 error_found = true;
3289 }
3290 }
3291 for (e = indirect_calls; e; e = e->next_callee)
3292 {
3293 if (e->verify_count_and_frequency ())
3294 error_found = true;
3295 if (gimple_has_body_p (e->caller->decl)
3296 && !e->caller->global.inlined_to
3297 && !e->speculative
3298 && (e->frequency
3299 != compute_call_stmt_bb_frequency (e->caller->decl,
3300 gimple_bb (e->call_stmt))))
3301 {
3302 error ("indirect call frequency %i does not match BB frequency %i",
3303 e->frequency,
3304 compute_call_stmt_bb_frequency (e->caller->decl,
3305 gimple_bb (e->call_stmt)));
3306 error_found = true;
3307 }
3308 }
3309 if (!callers && global.inlined_to)
3310 {
3311 error ("inlined_to pointer is set but no predecessors found");
3312 error_found = true;
3313 }
3314 if (global.inlined_to == this)
3315 {
3316 error ("inlined_to pointer refers to itself");
3317 error_found = true;
3318 }
3319
3320 if (clone_of)
3321 {
3322 cgraph_node *n;
3323 for (n = clone_of->clones; n; n = n->next_sibling_clone)
3324 if (n == this)
3325 break;
3326 if (!n)
3327 {
3328 error ("cgraph_node has wrong clone_of");
3329 error_found = true;
3330 }
3331 }
3332 if (clones)
3333 {
3334 cgraph_node *n;
3335 for (n = clones; n; n = n->next_sibling_clone)
3336 if (n->clone_of != this)
3337 break;
3338 if (n)
3339 {
3340 error ("cgraph_node has wrong clone list");
3341 error_found = true;
3342 }
3343 }
3344 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3345 {
3346 error ("cgraph_node is in clone list but it is not clone");
3347 error_found = true;
3348 }
3349 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3350 {
3351 error ("cgraph_node has wrong prev_clone pointer");
3352 error_found = true;
3353 }
3354 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3355 {
3356 error ("double linked list of clones corrupted");
3357 error_found = true;
3358 }
3359
3360 if (analyzed && alias)
3361 {
3362 bool ref_found = false;
3363 int i;
3364 ipa_ref *ref = NULL;
3365
3366 if (callees)
3367 {
3368 error ("Alias has call edges");
3369 error_found = true;
3370 }
3371 for (i = 0; iterate_reference (i, ref); i++)
3372 if (ref->use == IPA_REF_CHKP)
3373 ;
3374 else if (ref->use != IPA_REF_ALIAS)
3375 {
3376 error ("Alias has non-alias reference");
3377 error_found = true;
3378 }
3379 else if (ref_found)
3380 {
3381 error ("Alias has more than one alias reference");
3382 error_found = true;
3383 }
3384 else
3385 ref_found = true;
3386 if (!ref_found)
3387 {
3388 error ("Analyzed alias has no reference");
3389 error_found = true;
3390 }
3391 }
3392
3393 /* Check instrumented version reference. */
3394 if (instrumented_version
3395 && instrumented_version->instrumented_version != this)
3396 {
3397 error ("Instrumentation clone does not reference original node");
3398 error_found = true;
3399 }
3400
3401 /* Cannot have orig_decl for non-instrumented nodes. */
3402 if (!instrumentation_clone && orig_decl)
3403 {
3404 error ("Not instrumented node has non-NULL original declaration");
3405 error_found = true;
3406 }
3407
3408 /* If the original non-instrumented node still exists then we may check
3409 that the original declaration is set properly. */
3410 if (instrumented_version
3411 && orig_decl
3412 && orig_decl != instrumented_version->decl)
3413 {
3414 error ("Instrumented node has wrong original declaration");
3415 error_found = true;
3416 }
3417
3418 /* Check all nodes have chkp reference to their instrumented versions. */
3419 if (analyzed
3420 && instrumented_version
3421 && !instrumentation_clone)
3422 {
3423 bool ref_found = false;
3424 int i;
3425 struct ipa_ref *ref;
3426
3427 for (i = 0; iterate_reference (i, ref); i++)
3428 if (ref->use == IPA_REF_CHKP)
3429 {
3430 if (ref_found)
3431 {
3432 error ("Node has more than one chkp reference");
3433 error_found = true;
3434 }
3435 if (ref->referred != instrumented_version)
3436 {
3437 error ("Wrong node is referenced with chkp reference");
3438 error_found = true;
3439 }
3440 ref_found = true;
3441 }
3442
3443 if (!ref_found)
3444 {
3445 error ("Analyzed node has no reference to instrumented version");
3446 error_found = true;
3447 }
3448 }
3449
3450 if (instrumentation_clone
3451 && DECL_BUILT_IN_CLASS (decl) == NOT_BUILT_IN)
3452 {
3453 tree name = DECL_ASSEMBLER_NAME (decl);
3454 tree orig_name = DECL_ASSEMBLER_NAME (orig_decl);
3455
3456 if (!IDENTIFIER_TRANSPARENT_ALIAS (name)
3457 || TREE_CHAIN (name) != orig_name)
3458 {
3459 error ("Alias chain for instrumented node is broken");
3460 error_found = true;
3461 }
3462 }
3463
3464 if (analyzed && thunk.thunk_p)
3465 {
3466 if (!callees)
3467 {
3468 error ("No edge out of thunk node");
3469 error_found = true;
3470 }
3471 else if (callees->next_callee)
3472 {
3473 error ("More than one edge out of thunk node");
3474 error_found = true;
3475 }
3476 if (gimple_has_body_p (decl) && !global.inlined_to)
3477 {
3478 error ("Thunk is not supposed to have body");
3479 error_found = true;
3480 }
3481 if (thunk.add_pointer_bounds_args
3482 && !instrumented_version->semantically_equivalent_p (callees->callee))
3483 {
3484 error ("Instrumentation thunk has wrong edge callee");
3485 error_found = true;
3486 }
3487 }
3488 else if (analyzed && gimple_has_body_p (decl)
3489 && !TREE_ASM_WRITTEN (decl)
3490 && (!DECL_EXTERNAL (decl) || global.inlined_to)
3491 && !flag_wpa)
3492 {
3493 if (this_cfun->cfg)
3494 {
3495 hash_set<gimple *> stmts;
3496 int i;
3497 ipa_ref *ref = NULL;
3498
3499 /* Reach the trees by walking over the CFG, and note the
3500 enclosing basic-blocks in the call edges. */
3501 FOR_EACH_BB_FN (this_block, this_cfun)
3502 {
3503 for (gsi = gsi_start_phis (this_block);
3504 !gsi_end_p (gsi); gsi_next (&gsi))
3505 stmts.add (gsi_stmt (gsi));
3506 for (gsi = gsi_start_bb (this_block);
3507 !gsi_end_p (gsi);
3508 gsi_next (&gsi))
3509 {
3510 gimple *stmt = gsi_stmt (gsi);
3511 stmts.add (stmt);
3512 if (is_gimple_call (stmt))
3513 {
3514 cgraph_edge *e = get_edge (stmt);
3515 tree decl = gimple_call_fndecl (stmt);
3516 if (e)
3517 {
3518 if (e->aux)
3519 {
3520 error ("shared call_stmt:");
3521 cgraph_debug_gimple_stmt (this_cfun, stmt);
3522 error_found = true;
3523 }
3524 if (!e->indirect_unknown_callee)
3525 {
3526 if (e->verify_corresponds_to_fndecl (decl))
3527 {
3528 error ("edge points to wrong declaration:");
3529 debug_tree (e->callee->decl);
3530 fprintf (stderr," Instead of:");
3531 debug_tree (decl);
3532 error_found = true;
3533 }
3534 }
3535 else if (decl)
3536 {
3537 error ("an indirect edge with unknown callee "
3538 "corresponding to a call_stmt with "
3539 "a known declaration:");
3540 error_found = true;
3541 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3542 }
3543 e->aux = (void *)1;
3544 }
3545 else if (decl)
3546 {
3547 error ("missing callgraph edge for call stmt:");
3548 cgraph_debug_gimple_stmt (this_cfun, stmt);
3549 error_found = true;
3550 }
3551 }
3552 }
3553 }
3554 for (i = 0; iterate_reference (i, ref); i++)
3555 if (ref->stmt && !stmts.contains (ref->stmt))
3556 {
3557 error ("reference to dead statement");
3558 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3559 error_found = true;
3560 }
3561 }
3562 else
3563 /* No CFG available?! */
3564 gcc_unreachable ();
3565
3566 for (e = callees; e; e = e->next_callee)
3567 {
3568 if (!e->aux)
3569 {
3570 error ("edge %s->%s has no corresponding call_stmt",
3571 identifier_to_locale (e->caller->name ()),
3572 identifier_to_locale (e->callee->name ()));
3573 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3574 error_found = true;
3575 }
3576 e->aux = 0;
3577 }
3578 for (e = indirect_calls; e; e = e->next_callee)
3579 {
3580 if (!e->aux && !e->speculative)
3581 {
3582 error ("an indirect edge from %s has no corresponding call_stmt",
3583 identifier_to_locale (e->caller->name ()));
3584 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3585 error_found = true;
3586 }
3587 e->aux = 0;
3588 }
3589 }
3590 if (error_found)
3591 {
3592 dump (stderr);
3593 internal_error ("verify_cgraph_node failed");
3594 }
3595 timevar_pop (TV_CGRAPH_VERIFY);
3596 }
3597
3598 /* Verify whole cgraph structure. */
3599 DEBUG_FUNCTION void
3600 cgraph_node::verify_cgraph_nodes (void)
3601 {
3602 cgraph_node *node;
3603
3604 if (seen_error ())
3605 return;
3606
3607 FOR_EACH_FUNCTION (node)
3608 node->verify ();
3609 }
3610
3611 /* Walk the alias chain to return the function cgraph_node is alias of.
3612 Walk through thunks, too.
3613 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3614 When REF is non-NULL, assume that reference happens in symbol REF
3615 when determining the availability. */
3616
3617 cgraph_node *
3618 cgraph_node::function_symbol (enum availability *availability,
3619 struct symtab_node *ref)
3620 {
3621 cgraph_node *node = ultimate_alias_target (availability, ref);
3622
3623 while (node->thunk.thunk_p)
3624 {
3625 ref = node;
3626 node = node->callees->callee;
3627 if (availability)
3628 {
3629 enum availability a;
3630 a = node->get_availability (ref);
3631 if (a < *availability)
3632 *availability = a;
3633 }
3634 node = node->ultimate_alias_target (availability, ref);
3635 }
3636 return node;
3637 }
3638
3639 /* Walk the alias chain to return the function cgraph_node is alias of.
3640 Walk through non virtual thunks, too. Thus we return either a function
3641 or a virtual thunk node.
3642 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3643 When REF is non-NULL, assume that reference happens in symbol REF
3644 when determining the availability. */
3645
3646 cgraph_node *
3647 cgraph_node::function_or_virtual_thunk_symbol
3648 (enum availability *availability,
3649 struct symtab_node *ref)
3650 {
3651 cgraph_node *node = ultimate_alias_target (availability, ref);
3652
3653 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3654 {
3655 ref = node;
3656 node = node->callees->callee;
3657 if (availability)
3658 {
3659 enum availability a;
3660 a = node->get_availability (ref);
3661 if (a < *availability)
3662 *availability = a;
3663 }
3664 node = node->ultimate_alias_target (availability, ref);
3665 }
3666 return node;
3667 }
3668
3669 /* When doing LTO, read cgraph_node's body from disk if it is not already
3670 present. */
3671
3672 bool
3673 cgraph_node::get_untransformed_body (void)
3674 {
3675 lto_file_decl_data *file_data;
3676 const char *data, *name;
3677 size_t len;
3678 tree decl = this->decl;
3679
3680 /* Check if body is already there. Either we have gimple body or
3681 the function is a thunk and in that case we set DECL_ARGUMENTS. */
3682 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3683 return false;
3684
3685 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3686
3687 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3688
3689 file_data = lto_file_data;
3690 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3691
3692 /* We may have renamed the declaration, e.g., a static function. */
3693 name = lto_get_decl_name_mapping (file_data, name);
3694 struct lto_in_decl_state *decl_state
3695 = lto_get_function_in_decl_state (file_data, decl);
3696
3697 data = lto_get_section_data (file_data, LTO_section_function_body,
3698 name, &len, decl_state->compressed);
3699 if (!data)
3700 fatal_error (input_location, "%s: section %s is missing",
3701 file_data->file_name,
3702 name);
3703
3704 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3705
3706 lto_input_function_body (file_data, this, data);
3707 lto_stats.num_function_bodies++;
3708 lto_free_section_data (file_data, LTO_section_function_body, name,
3709 data, len, decl_state->compressed);
3710 lto_free_function_in_decl_state_for_node (this);
3711 /* Keep lto file data so ipa-inline-analysis knows about cross module
3712 inlining. */
3713
3714 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3715
3716 return true;
3717 }
3718
3719 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3720 if it is not already present. When some IPA transformations are scheduled,
3721 apply them. */
3722
3723 bool
3724 cgraph_node::get_body (void)
3725 {
3726 bool updated;
3727
3728 updated = get_untransformed_body ();
3729
3730 /* Getting transformed body makes no sense for inline clones;
3731 we should never use this on real clones because they are materialized
3732 early.
3733 TODO: Materializing clones here will likely lead to smaller LTRANS
3734 footprint. */
3735 gcc_assert (!global.inlined_to && !clone_of);
3736 if (ipa_transforms_to_apply.exists ())
3737 {
3738 opt_pass *saved_current_pass = current_pass;
3739 FILE *saved_dump_file = dump_file;
3740 const char *saved_dump_file_name = dump_file_name;
3741 dump_flags_t saved_dump_flags = dump_flags;
3742 dump_file_name = NULL;
3743 dump_file = NULL;
3744
3745 push_cfun (DECL_STRUCT_FUNCTION (decl));
3746 execute_all_ipa_transforms ();
3747 cgraph_edge::rebuild_edges ();
3748 free_dominance_info (CDI_DOMINATORS);
3749 free_dominance_info (CDI_POST_DOMINATORS);
3750 pop_cfun ();
3751 updated = true;
3752
3753 current_pass = saved_current_pass;
3754 dump_file = saved_dump_file;
3755 dump_file_name = saved_dump_file_name;
3756 dump_flags = saved_dump_flags;
3757 }
3758 return updated;
3759 }
3760
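/* Illustrative sketch (assumed LTO caller, not from this file): before
   inspecting a function during WPA or LTRANS one would do

     if (node->get_untransformed_body ())
       ... the body was just streamed in from the LTO section ...

   or call get_body () instead when previously scheduled IPA transforms
   must also be applied to the statements.  */
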
3761 /* Return the DECL_STRUCT_FUNCTION of the function. */
3762
3763 struct function *
3764 cgraph_node::get_fun (void)
3765 {
3766 cgraph_node *node = this;
3767 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3768
3769 while (!fun && node->clone_of)
3770 {
3771 node = node->clone_of;
3772 fun = DECL_STRUCT_FUNCTION (node->decl);
3773 }
3774
3775 return fun;
3776 }
3777
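/* A hypothetical usage sketch, for illustration only: clones share the body
   of the function they were cloned from, so code that needs the struct
   function of a possibly-cloned NODE can use get_fun instead of reading
   DECL_STRUCT_FUNCTION directly; the variable names below are assumed.

       struct function *fn = node->get_fun ();
       if (fn != NULL)
         {
           // FN is the body shared by NODE and the function it is a clone of.
         }
*/
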
3778 /* Verify that the types of the call arguments in STMT match those of the
3779 declaration FNDECL; with ARGS_COUNT_MATCH also require equal argument
3780 counts. If we cannot verify this or there is a mismatch, return false. */
3781
3782 static bool
3783 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3784 {
3785 tree parms, p;
3786 unsigned int i, nargs;
3787
3788 /* Calls to internal functions always match their signature. */
3789 if (gimple_call_internal_p (stmt))
3790 return true;
3791
3792 nargs = gimple_call_num_args (stmt);
3793
3794 /* Get argument types for verification. */
3795 if (fndecl)
3796 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3797 else
3798 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3799
3800 /* Verify if the type of the argument matches that of the function
3801 declaration. If we cannot verify this or there is a mismatch,
3802 return false. */
3803 if (fndecl && DECL_ARGUMENTS (fndecl))
3804 {
3805 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3806 i < nargs;
3807 i++, p = DECL_CHAIN (p))
3808 {
3809 tree arg;
3810 /* We cannot distinguish a varargs function from the case
3811 of excess parameters; still, deferring the inlining decision
3812 to the callee is possible. */
3813 if (!p)
3814 break;
3815 arg = gimple_call_arg (stmt, i);
3816 if (p == error_mark_node
3817 || DECL_ARG_TYPE (p) == error_mark_node
3818 || arg == error_mark_node
3819 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3820 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3821 return false;
3822 }
3823 if (args_count_match && p)
3824 return false;
3825 }
3826 else if (parms)
3827 {
3828 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3829 {
3830 tree arg;
3831 /* If this is a varargs function, defer the inlining decision
3832 to the callee. */
3833 if (!p)
3834 break;
3835 arg = gimple_call_arg (stmt, i);
3836 if (TREE_VALUE (p) == error_mark_node
3837 || arg == error_mark_node
3838 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3839 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3840 && !fold_convertible_p (TREE_VALUE (p), arg)))
3841 return false;
3842 }
3843 }
3844 else
3845 {
3846 if (nargs != 0)
3847 return false;
3848 }
3849 return true;
3850 }
3851
3852 /* Verify that the types of the arguments and the lhs of CALL_STMT match
3853 those of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3854 true, the argument counts must be the same as well.
3855 If we cannot verify this or there is a mismatch, return false. */
3856
3857 bool
3858 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3859 bool args_count_match)
3860 {
3861 tree lhs;
3862
3863 if ((DECL_RESULT (callee)
3864 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3865 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3866 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3867 TREE_TYPE (lhs))
3868 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3869 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3870 return false;
3871 return true;
3872 }
3873
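/* A hypothetical usage sketch, for illustration only: when a pass turns an
   indirect call into a direct call to TARGET, it can use the check above to
   decide whether the call site may later be inlined; the variables E and
   TARGET are assumed to be a cgraph_edge and a cgraph_node.

       if (!gimple_check_call_matching_types (e->call_stmt, target->decl,
                                              true))
         {
           e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
           e->call_stmt_cannot_inline_p = true;
         }
*/
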
3874 /* Reset all state within cgraph.c so that we can rerun the compiler
3875 within the same process. For use by toplev::finalize. */
3876
3877 void
3878 cgraph_c_finalize (void)
3879 {
3880 symtab = NULL;
3881
3882 x_cgraph_nodes_queue = NULL;
3883
3884 cgraph_fnver_htab = NULL;
3885 version_info_node = NULL;
3886 }
3887
3888 /* A worker for call_for_symbol_and_aliases. */
3889
3890 bool
3891 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3892 void *),
3893 void *data,
3894 bool include_overwritable)
3895 {
3896 ipa_ref *ref;
3897 FOR_EACH_ALIAS (this, ref)
3898 {
3899 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3900 if (include_overwritable
3901 || alias->get_availability () > AVAIL_INTERPOSABLE)
3902 if (alias->call_for_symbol_and_aliases (callback, data,
3903 include_overwritable))
3904 return true;
3905 }
3906 return false;
3907 }
3908
3909 /* Return true if NODE has a thunk; that is, some caller of NODE is a thunk. */
3910
3911 bool
3912 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3913 {
3914 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3915 if (e->caller->thunk.thunk_p)
3916 return true;
3917 return false;
3918 }
3919
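/* A hypothetical usage sketch, for illustration only: has_thunk_p is meant
   to be used as a callback of call_for_symbol_and_aliases, e.g. to test
   whether NODE or any of its aliases is called through a thunk.

       if (node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
                                              NULL, true))
         {
           // Some caller of NODE or of one of its aliases is a thunk.
         }
*/
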
3920 #include "gt-cgraph.h"