[gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2016 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call graph is a data structure designed for inter-procedural optimization.
24 It represents a multi-graph where nodes are functions and edges are call sites. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "backend.h"
30 #include "target.h"
31 #include "rtl.h"
32 #include "tree.h"
33 #include "gimple.h"
34 #include "predict.h"
35 #include "alloc-pool.h"
36 #include "gimple-ssa.h"
37 #include "cgraph.h"
38 #include "lto-streamer.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "print-tree.h"
43 #include "langhooks.h"
44 #include "intl.h"
45 #include "tree-eh.h"
46 #include "gimple-iterator.h"
47 #include "tree-cfg.h"
48 #include "tree-ssa.h"
49 #include "value-prof.h"
50 #include "ipa-utils.h"
51 #include "symbol-summary.h"
52 #include "ipa-prop.h"
53 #include "ipa-inline.h"
54 #include "cfgloop.h"
55 #include "gimple-pretty-print.h"
56 #include "tree-dfa.h"
57 #include "profile.h"
58 #include "params.h"
59 #include "tree-chkp.h"
60 #include "context.h"
61 #include "gimplify.h"
62
63 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
64 #include "tree-pass.h"
65
66 /* Queue of cgraph nodes scheduled to be lowered. */
67 symtab_node *x_cgraph_nodes_queue;
68 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
69
70 /* Symbol table global context. */
71 symbol_table *symtab;
72
73 /* List of hooks triggered on cgraph_edge events. */
74 struct cgraph_edge_hook_list {
75 cgraph_edge_hook hook;
76 void *data;
77 struct cgraph_edge_hook_list *next;
78 };
79
80 /* List of hooks triggered on cgraph_node events. */
81 struct cgraph_node_hook_list {
82 cgraph_node_hook hook;
83 void *data;
84 struct cgraph_node_hook_list *next;
85 };
86
87 /* List of hooks triggered on events involving two cgraph_edges. */
88 struct cgraph_2edge_hook_list {
89 cgraph_2edge_hook hook;
90 void *data;
91 struct cgraph_2edge_hook_list *next;
92 };
93
94 /* List of hooks triggered on events involving two cgraph_nodes. */
95 struct cgraph_2node_hook_list {
96 cgraph_2node_hook hook;
97 void *data;
98 struct cgraph_2node_hook_list *next;
99 };
100
101 /* Hash descriptor for cgraph_function_version_info. */
102
103 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
104 {
105 static hashval_t hash (cgraph_function_version_info *);
106 static bool equal (cgraph_function_version_info *,
107 cgraph_function_version_info *);
108 };
109
110 /* Map a cgraph_node to cgraph_function_version_info using this htab.
111 The cgraph_function_version_info has a THIS_NODE field that is the
112 corresponding cgraph_node. */
113
114 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
115
116 /* Hash function for cgraph_fnver_htab. */
117 hashval_t
118 function_version_hasher::hash (cgraph_function_version_info *ptr)
119 {
120 int uid = ptr->this_node->uid;
121 return (hashval_t)(uid);
122 }
123
124 /* eq function for cgraph_fnver_htab. */
125 bool
126 function_version_hasher::equal (cgraph_function_version_info *n1,
127 cgraph_function_version_info *n2)
128 {
129 return n1->this_node->uid == n2->this_node->uid;
130 }
131
132 /* GC root that keeps the most recently allocated version info node alive. */
133 static GTY(()) struct cgraph_function_version_info *
134 version_info_node = NULL;
135
136 /* Return true if NODE's address can be compared. */
137
138 bool
139 symtab_node::address_can_be_compared_p ()
140 {
141 /* Addresses of virtual tables and virtual functions are never compared. */
142 if (DECL_VIRTUAL_P (decl))
143 return false;
144 /* Addresses of C++ cdtors are never compared. */
145 if (is_a <cgraph_node *> (this)
146 && (DECL_CXX_CONSTRUCTOR_P (decl)
147 || DECL_CXX_DESTRUCTOR_P (decl)))
148 return false;
149 /* Addresses of constant pool symbols are never compared;
150 flag_merge_constants permits us to assume the same for read-only vars. */
151 if (is_a <varpool_node *> (this)
152 && (DECL_IN_CONSTANT_POOL (decl)
153 || (flag_merge_constants >= 2
154 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
155 return false;
156 return true;
157 }
158
159 /* Get the cgraph_function_version_info node corresponding to node. */
160 cgraph_function_version_info *
161 cgraph_node::function_version (void)
162 {
163 cgraph_function_version_info key;
164 key.this_node = this;
165
166 if (cgraph_fnver_htab == NULL)
167 return NULL;
168
169 return cgraph_fnver_htab->find (&key);
170 }
171
172 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
173 corresponding to cgraph_node NODE. */
174 cgraph_function_version_info *
175 cgraph_node::insert_new_function_version (void)
176 {
177 version_info_node = NULL;
178 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
179 version_info_node->this_node = this;
180
181 if (cgraph_fnver_htab == NULL)
182 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
183
184 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
185 = version_info_node;
186 return version_info_node;
187 }
188
189 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
190 DECL is a duplicate declaration. */
191 void
192 cgraph_node::delete_function_version (tree decl)
193 {
194 cgraph_node *decl_node = cgraph_node::get (decl);
195 cgraph_function_version_info *decl_v = NULL;
196
197 if (decl_node == NULL)
198 return;
199
200 decl_v = decl_node->function_version ();
201
202 if (decl_v == NULL)
203 return;
204
205 if (decl_v->prev != NULL)
206 decl_v->prev->next = decl_v->next;
207
208 if (decl_v->next != NULL)
209 decl_v->next->prev = decl_v->prev;
210
211 if (cgraph_fnver_htab != NULL)
212 cgraph_fnver_htab->remove_elt (decl_v);
213
214 decl_node->remove ();
215 }
216
217 /* Record that DECL1 and DECL2 are semantically identical function
218 versions. */
219 void
220 cgraph_node::record_function_versions (tree decl1, tree decl2)
221 {
222 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
223 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
224 cgraph_function_version_info *decl1_v = NULL;
225 cgraph_function_version_info *decl2_v = NULL;
226 cgraph_function_version_info *before;
227 cgraph_function_version_info *after;
228
229 gcc_assert (decl1_node != NULL && decl2_node != NULL);
230 decl1_v = decl1_node->function_version ();
231 decl2_v = decl2_node->function_version ();
232
233 if (decl1_v != NULL && decl2_v != NULL)
234 return;
235
236 if (decl1_v == NULL)
237 decl1_v = decl1_node->insert_new_function_version ();
238
239 if (decl2_v == NULL)
240 decl2_v = decl2_node->insert_new_function_version ();
241
242 /* Chain decl2_v and decl1_v. All semantically identical versions
243 will be chained together. */
244
245 before = decl1_v;
246 after = decl2_v;
247
248 while (before->next != NULL)
249 before = before->next;
250
251 while (after->prev != NULL)
252 after = after->prev;
253
254 before->next = after;
255 after->prev = before;
256 }
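
/* Editorial usage sketch, not part of the original sources: a front end that
   has seen two semantically identical versions of a function records them
   once and can then walk the chain through the NEXT (or PREV) pointers.
   DECL1, DECL2 and visit_version below are hypothetical names.

     cgraph_node::record_function_versions (decl1, decl2);
     for (cgraph_function_version_info *v
            = cgraph_node::get (decl1)->function_version ();
          v != NULL; v = v->next)
       visit_version (v->this_node);
*/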
257
258 /* Initialize callgraph dump file. */
259
260 void
261 symbol_table::initialize (void)
262 {
263 if (!dump_file)
264 dump_file = dump_begin (TDI_cgraph, NULL);
265 }
266
267 /* Allocate new callgraph node and insert it into basic data structures. */
268
269 cgraph_node *
270 symbol_table::create_empty (void)
271 {
272 cgraph_node *node = allocate_cgraph_symbol ();
273
274 node->type = SYMTAB_FUNCTION;
275 node->frequency = NODE_FREQUENCY_NORMAL;
276 node->count_materialization_scale = REG_BR_PROB_BASE;
277 cgraph_count++;
278
279 return node;
280 }
281
282 /* Register HOOK to be called with DATA on each removed edge. */
283 cgraph_edge_hook_list *
284 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
285 {
286 cgraph_edge_hook_list *entry;
287 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
288
289 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
290 entry->hook = hook;
291 entry->data = data;
292 entry->next = NULL;
293 while (*ptr)
294 ptr = &(*ptr)->next;
295 *ptr = entry;
296 return entry;
297 }
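
/* Editorial usage sketch, not part of the original sources: an IPA pass that
   caches per-edge data typically registers a removal hook so the cache can be
   dropped when an edge disappears, and unregisters it when the pass finishes.
   The callback name below is hypothetical; its signature follows the
   cgraph_edge_hook typedef.

     static void
     forget_edge_data (cgraph_edge *e, void *data)
     {
       ...
     }

     cgraph_edge_hook_list *holder
       = symtab->add_edge_removal_hook (forget_edge_data, NULL);
     ...
     symtab->remove_edge_removal_hook (holder);
*/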
298
299 /* Remove ENTRY from the list of hooks called on removing edges. */
300 void
301 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
302 {
303 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
304
305 while (*ptr != entry)
306 ptr = &(*ptr)->next;
307 *ptr = entry->next;
308 free (entry);
309 }
310
311 /* Call all edge removal hooks. */
312 void
313 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
314 {
315 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
316 while (entry)
317 {
318 entry->hook (e, entry->data);
319 entry = entry->next;
320 }
321 }
322
323 /* Register HOOK to be called with DATA on each removed node. */
324 cgraph_node_hook_list *
325 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
326 {
327 cgraph_node_hook_list *entry;
328 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
329
330 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
331 entry->hook = hook;
332 entry->data = data;
333 entry->next = NULL;
334 while (*ptr)
335 ptr = &(*ptr)->next;
336 *ptr = entry;
337 return entry;
338 }
339
340 /* Remove ENTRY from the list of hooks called on removing nodes. */
341 void
342 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
343 {
344 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
345
346 while (*ptr != entry)
347 ptr = &(*ptr)->next;
348 *ptr = entry->next;
349 free (entry);
350 }
351
352 /* Call all node removal hooks. */
353 void
354 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
355 {
356 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
357 while (entry)
358 {
359 entry->hook (node, entry->data);
360 entry = entry->next;
361 }
362 }
363
364 /* Call all node insertion hooks. */
365 void
366 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
367 {
368 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
369 while (entry)
370 {
371 entry->hook (node, entry->data);
372 entry = entry->next;
373 }
374 }
375
376
377 /* Register HOOK to be called with DATA on each inserted node. */
378 cgraph_node_hook_list *
379 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
380 {
381 cgraph_node_hook_list *entry;
382 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
383
384 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
385 entry->hook = hook;
386 entry->data = data;
387 entry->next = NULL;
388 while (*ptr)
389 ptr = &(*ptr)->next;
390 *ptr = entry;
391 return entry;
392 }
393
394 /* Remove ENTRY from the list of hooks called on inserted nodes. */
395 void
396 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
397 {
398 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
399
400 while (*ptr != entry)
401 ptr = &(*ptr)->next;
402 *ptr = entry->next;
403 free (entry);
404 }
405
406 /* Register HOOK to be called with DATA on each duplicated edge. */
407 cgraph_2edge_hook_list *
408 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
409 {
410 cgraph_2edge_hook_list *entry;
411 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
412
413 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
414 entry->hook = hook;
415 entry->data = data;
416 entry->next = NULL;
417 while (*ptr)
418 ptr = &(*ptr)->next;
419 *ptr = entry;
420 return entry;
421 }
422
423 /* Remove ENTRY from the list of hooks called on duplicating edges. */
424 void
425 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
426 {
427 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
428
429 while (*ptr != entry)
430 ptr = &(*ptr)->next;
431 *ptr = entry->next;
432 free (entry);
433 }
434
435 /* Call all edge duplication hooks. */
436 void
437 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
438 {
439 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
440 while (entry)
441 {
442 entry->hook (cs1, cs2, entry->data);
443 entry = entry->next;
444 }
445 }
446
447 /* Register HOOK to be called with DATA on each duplicated node. */
448 cgraph_2node_hook_list *
449 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
450 {
451 cgraph_2node_hook_list *entry;
452 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
453
454 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
455 entry->hook = hook;
456 entry->data = data;
457 entry->next = NULL;
458 while (*ptr)
459 ptr = &(*ptr)->next;
460 *ptr = entry;
461 return entry;
462 }
463
464 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
465 void
466 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
467 {
468 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
469
470 while (*ptr != entry)
471 ptr = &(*ptr)->next;
472 *ptr = entry->next;
473 free (entry);
474 }
475
476 /* Call all node duplication hooks. */
477 void
478 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
479 cgraph_node *node2)
480 {
481 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
482 while (entry)
483 {
484 entry->hook (node, node2, entry->data);
485 entry = entry->next;
486 }
487 }
488
489 /* Allocate a new cgraph node for DECL and register it in the symbol table. */
490
491 cgraph_node *
492 cgraph_node::create (tree decl)
493 {
494 cgraph_node *node = symtab->create_empty ();
495 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
496
497 node->decl = decl;
498
499 if ((flag_openacc || flag_openmp)
500 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
501 {
502 node->offloadable = 1;
503 if (ENABLE_OFFLOADING)
504 g->have_offload = true;
505 }
506
507 node->register_symbol ();
508
509 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
510 {
511 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
512 node->next_nested = node->origin->nested;
513 node->origin->nested = node;
514 }
515 return node;
516 }
517
518 /* Try to find a call graph node for declaration DECL and if it does not exist
519 or if it corresponds to an inline clone, create a new one. */
520
521 cgraph_node *
522 cgraph_node::get_create (tree decl)
523 {
524 cgraph_node *first_clone = cgraph_node::get (decl);
525
526 if (first_clone && !first_clone->global.inlined_to)
527 return first_clone;
528
529 cgraph_node *node = cgraph_node::create (decl);
530 if (first_clone)
531 {
532 first_clone->clone_of = node;
533 node->clones = first_clone;
534 symtab->symtab_prevail_in_asm_name_hash (node);
535 node->decl->decl_with_vis.symtab_node = node;
536 if (dump_file)
537 fprintf (dump_file, "Introduced new external node "
538 "(%s/%i) and turned into root of the clone tree.\n",
539 node->name (), node->order);
540 }
541 else if (dump_file)
542 fprintf (dump_file, "Introduced new external node "
543 "(%s/%i).\n", node->name (), node->order);
544 return node;
545 }
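
/* Editorial note, not part of the original sources: callers that merely want
   to look up an existing node use cgraph_node::get, which may return NULL,
   while callers that need a node to exist use cgraph_node::get_create, e.g.

     cgraph_node *node = cgraph_node::get_create (fndecl);

   where FNDECL stands for some FUNCTION_DECL. */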
546
547 /* Mark ALIAS as an alias to TARGET, which may be either a FUNCTION_DECL or
548 an assembler name (IDENTIFIER_NODE). Return the cgraph node for ALIAS. */
549
550 cgraph_node *
551 cgraph_node::create_alias (tree alias, tree target)
552 {
553 cgraph_node *alias_node;
554
555 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
556 || TREE_CODE (target) == IDENTIFIER_NODE);
557 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
558 alias_node = cgraph_node::get_create (alias);
559 gcc_assert (!alias_node->definition);
560 alias_node->alias_target = target;
561 alias_node->definition = true;
562 alias_node->alias = true;
563 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
564 alias_node->transparent_alias = alias_node->weakref = true;
565 return alias_node;
566 }
567
568 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
569 and NULL otherwise.
570 Same body aliases are output whenever the body of DECL is output,
571 and cgraph_node::get (ALIAS) transparently returns
572 cgraph_node::get (DECL). */
573
574 cgraph_node *
575 cgraph_node::create_same_body_alias (tree alias, tree decl)
576 {
577 cgraph_node *n;
578 #ifndef ASM_OUTPUT_DEF
579 /* If aliases aren't supported by the assembler, fail. */
580 return NULL;
581 #endif
582 /* Langhooks can create same body aliases of symbols not defined.
583 Those are useless. Drop them on the floor. */
584 if (symtab->global_info_ready)
585 return NULL;
586
587 n = cgraph_node::create_alias (alias, decl);
588 n->cpp_implicit_alias = true;
589 if (symtab->cpp_implicit_aliases_done)
590 n->resolve_alias (cgraph_node::get (decl));
591 return n;
592 }
593
594 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
595 aliases DECL, with adjustments made to the first parameter.
596 See the comments in thunk_adjust for details on the parameters. */
597
598 cgraph_node *
599 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
600 HOST_WIDE_INT fixed_offset,
601 HOST_WIDE_INT virtual_value,
602 tree virtual_offset,
603 tree real_alias)
604 {
605 cgraph_node *node;
606
607 node = cgraph_node::get (alias);
608 if (node)
609 node->reset ();
610 else
611 node = cgraph_node::create (alias);
612 gcc_checking_assert (!virtual_offset
613 || wi::eq_p (virtual_offset, virtual_value));
614 node->thunk.fixed_offset = fixed_offset;
615 node->thunk.this_adjusting = this_adjusting;
616 node->thunk.virtual_value = virtual_value;
617 node->thunk.virtual_offset_p = virtual_offset != NULL;
618 node->thunk.alias = real_alias;
619 node->thunk.thunk_p = true;
620 node->definition = true;
621
622 return node;
623 }
624
625 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
626 Return NULL if there's no such node. */
627
628 cgraph_node *
629 cgraph_node::get_for_asmname (tree asmname)
630 {
631 /* We do not want to look at inline clones. */
632 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
633 node;
634 node = node->next_sharing_asm_name)
635 {
636 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
637 if (cn && !cn->global.inlined_to)
638 return cn;
639 }
640 return NULL;
641 }
642
643 /* Returns a hash value for X (which really is a cgraph_edge). */
644
645 hashval_t
646 cgraph_edge_hasher::hash (cgraph_edge *e)
647 {
648 /* This is a really poor hash function, but it is what htab_hash_pointer
649 uses. */
650 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
651 }
652
653 /* Returns a hash value for the call statement CALL_STMT. */
654
655 hashval_t
656 cgraph_edge_hasher::hash (gimple *call_stmt)
657 {
658 /* This is a really poor hash function, but it is what htab_hash_pointer
659 uses. */
660 return (hashval_t) ((intptr_t)call_stmt >> 3);
661 }
662
663 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
664
665 inline bool
666 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
667 {
668 return x->call_stmt == y;
669 }
670
671 /* Update the entry for call graph edge E in the call site hash of its caller. */
672
673 static inline void
674 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
675 {
676 gimple *call = e->call_stmt;
677 *e->caller->call_site_hash->find_slot_with_hash
678 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
679 }
680
681 /* Add call graph edge E to call site hash of its caller. */
682
683 static inline void
684 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
685 {
686 /* There are two speculative edges for every statement (one direct,
687 one indirect); always hash the direct one. */
688 if (e->speculative && e->indirect_unknown_callee)
689 return;
690 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
691 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
692 if (*slot)
693 {
694 gcc_assert (((cgraph_edge *)*slot)->speculative);
695 if (e->callee)
696 *slot = e;
697 return;
698 }
699 gcc_assert (!*slot || e->speculative);
700 *slot = e;
701 }
702
703 /* Return the callgraph edge representing the GIMPLE_CALL statement
704 CALL_STMT. */
705
706 cgraph_edge *
707 cgraph_node::get_edge (gimple *call_stmt)
708 {
709 cgraph_edge *e, *e2;
710 int n = 0;
711
712 if (call_site_hash)
713 return call_site_hash->find_with_hash
714 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
715
716 /* This loop may turn out to be a performance problem. In that case, adding
717 hash tables to call nodes with very many edges is probably the best
718 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
719 because we want to make it possible to have multiple cgraph nodes representing
720 different clones of the same body before the body is actually cloned. */
721 for (e = callees; e; e = e->next_callee)
722 {
723 if (e->call_stmt == call_stmt)
724 break;
725 n++;
726 }
727
728 if (!e)
729 for (e = indirect_calls; e; e = e->next_callee)
730 {
731 if (e->call_stmt == call_stmt)
732 break;
733 n++;
734 }
735
736 if (n > 100)
737 {
738 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
739 for (e2 = callees; e2; e2 = e2->next_callee)
740 cgraph_add_edge_to_call_site_hash (e2);
741 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
742 cgraph_add_edge_to_call_site_hash (e2);
743 }
744
745 return e;
746 }
747
748
749 /* Change the call_stmt field of the edge to NEW_STMT.
750 If UPDATE_SPECULATIVE and E is any component of a speculative
751 edge, then update all components. */
752
753 void
754 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
755 {
756 tree decl;
757
758 /* Speculative edges have three components; update all of them
759 when asked to. */
760 if (update_speculative && speculative)
761 {
762 cgraph_edge *direct, *indirect;
763 ipa_ref *ref;
764
765 speculative_call_info (direct, indirect, ref);
766 direct->set_call_stmt (new_stmt, false);
767 indirect->set_call_stmt (new_stmt, false);
768 ref->stmt = new_stmt;
769 return;
770 }
771
772 /* Only direct speculative edges go to call_site_hash. */
773 if (caller->call_site_hash
774 && (!speculative || !indirect_unknown_callee))
775 {
776 caller->call_site_hash->remove_elt_with_hash
777 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
778 }
779
780 cgraph_edge *e = this;
781
782 call_stmt = new_stmt;
783 if (indirect_unknown_callee
784 && (decl = gimple_call_fndecl (new_stmt)))
785 {
786 /* Constant propagation (and possibly also inlining?) can turn an
787 indirect call into a direct one. */
788 cgraph_node *new_callee = cgraph_node::get (decl);
789
790 gcc_checking_assert (new_callee);
791 e = make_direct (new_callee);
792 }
793
794 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
795 e->can_throw_external = stmt_can_throw_external (new_stmt);
796 pop_cfun ();
797 if (e->caller->call_site_hash)
798 cgraph_add_edge_to_call_site_hash (e);
799 }
800
801 /* Allocate a cgraph_edge structure and fill it with data according to the
802 parameters of which only CALLEE can be NULL (when creating an indirect call
803 edge). */
804
805 cgraph_edge *
806 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
807 gcall *call_stmt, gcov_type count, int freq,
808 bool indir_unknown_callee)
809 {
810 cgraph_edge *edge;
811
812 /* LTO does not actually have access to the call_stmt since these
813 have not been loaded yet. */
814 if (call_stmt)
815 {
816 /* This is a rather expensive check possibly triggering
817 construction of call stmt hashtable. */
818 cgraph_edge *e;
819 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
820 || e->speculative);
821
822 gcc_assert (is_gimple_call (call_stmt));
823 }
824
825 if (free_edges)
826 {
827 edge = free_edges;
828 free_edges = NEXT_FREE_EDGE (edge);
829 }
830 else
831 {
832 edge = ggc_alloc<cgraph_edge> ();
833 edge->uid = edges_max_uid++;
834 }
835
836 edges_count++;
837
838 edge->aux = NULL;
839 edge->caller = caller;
840 edge->callee = callee;
841 edge->prev_caller = NULL;
842 edge->next_caller = NULL;
843 edge->prev_callee = NULL;
844 edge->next_callee = NULL;
845 edge->lto_stmt_uid = 0;
846
847 edge->count = count;
848 gcc_assert (count >= 0);
849 edge->frequency = freq;
850 gcc_assert (freq >= 0);
851 gcc_assert (freq <= CGRAPH_FREQ_MAX);
852
853 edge->call_stmt = call_stmt;
854 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
855 edge->can_throw_external
856 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
857 pop_cfun ();
858 if (call_stmt
859 && callee && callee->decl
860 && !gimple_check_call_matching_types (call_stmt, callee->decl,
861 false))
862 {
863 edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
864 edge->call_stmt_cannot_inline_p = true;
865 }
866 else
867 {
868 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
869 edge->call_stmt_cannot_inline_p = false;
870 }
871
872 edge->indirect_info = NULL;
873 edge->indirect_inlining_edge = 0;
874 edge->speculative = false;
875 edge->indirect_unknown_callee = indir_unknown_callee;
876 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
877 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
878 edge->in_polymorphic_cdtor
879 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
880 caller->decl);
881 else
882 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
883 if (call_stmt && caller->call_site_hash)
884 cgraph_add_edge_to_call_site_hash (edge);
885
886 return edge;
887 }
888
889 /* Create edge from a given function to CALLEE in the cgraph. */
890
891 cgraph_edge *
892 cgraph_node::create_edge (cgraph_node *callee,
893 gcall *call_stmt, gcov_type count, int freq)
894 {
895 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
896 freq, false);
897
898 initialize_inline_failed (edge);
899
900 edge->next_caller = callee->callers;
901 if (callee->callers)
902 callee->callers->prev_caller = edge;
903 edge->next_callee = callees;
904 if (callees)
905 callees->prev_callee = edge;
906 callees = edge;
907 callee->callers = edge;
908
909 return edge;
910 }
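
/* Editorial usage sketch, not part of the original sources: a pass that has
   just built a new call statement NEW_CALL in basic block BB of CALLER would
   typically create the matching edge from the block's profile data; NEW_CALL,
   BB and CALLER are hypothetical names.

     caller->create_edge (cgraph_node::get_create (gimple_call_fndecl (new_call)),
                          new_call, bb->count,
                          compute_call_stmt_bb_frequency (caller->decl, bb));
*/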
911
912 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
913
914 cgraph_indirect_call_info *
915 cgraph_allocate_init_indirect_info (void)
916 {
917 cgraph_indirect_call_info *ii;
918
919 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
920 ii->param_index = -1;
921 return ii;
922 }
923
924 /* Create an indirect edge with a yet-undetermined callee where the call
925 statement destination is a formal parameter of the caller with index
926 PARAM_INDEX. */
927
928 cgraph_edge *
929 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
930 gcov_type count, int freq,
931 bool compute_indirect_info)
932 {
933 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
934 count, freq, true);
935 tree target;
936
937 initialize_inline_failed (edge);
938
939 edge->indirect_info = cgraph_allocate_init_indirect_info ();
940 edge->indirect_info->ecf_flags = ecf_flags;
941 edge->indirect_info->vptr_changed = true;
942
943 /* Record polymorphic call info. */
944 if (compute_indirect_info
945 && call_stmt
946 && (target = gimple_call_fn (call_stmt))
947 && virtual_method_call_p (target))
948 {
949 ipa_polymorphic_call_context context (decl, target, call_stmt);
950
951 /* Only record types can have virtual calls. */
952 edge->indirect_info->polymorphic = true;
953 edge->indirect_info->param_index = -1;
954 edge->indirect_info->otr_token
955 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
956 edge->indirect_info->otr_type = obj_type_ref_class (target);
957 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
958 edge->indirect_info->context = context;
959 }
960
961 edge->next_callee = indirect_calls;
962 if (indirect_calls)
963 indirect_calls->prev_callee = edge;
964 indirect_calls = edge;
965
966 return edge;
967 }
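
/* Editorial usage sketch, not part of the original sources: for a call
   through a function pointer (or a virtual call) CALL in basic block BB, an
   indirect edge is created much like a direct one; CALL and BB are
   hypothetical names.

     caller->create_indirect_edge (call, gimple_call_flags (call),
                                   bb->count,
                                   compute_call_stmt_bb_frequency (caller->decl, bb),
                                   true);
*/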
968
969 /* Remove the edge from the list of the callees of the caller. */
970
971 void
972 cgraph_edge::remove_caller (void)
973 {
974 if (prev_callee)
975 prev_callee->next_callee = next_callee;
976 if (next_callee)
977 next_callee->prev_callee = prev_callee;
978 if (!prev_callee)
979 {
980 if (indirect_unknown_callee)
981 caller->indirect_calls = next_callee;
982 else
983 caller->callees = next_callee;
984 }
985 if (caller->call_site_hash)
986 caller->call_site_hash->remove_elt_with_hash
987 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
988 }
989
990 /* Put the edge onto the free list. */
991
992 void
993 symbol_table::free_edge (cgraph_edge *e)
994 {
995 int uid = e->uid;
996
997 if (e->indirect_info)
998 ggc_free (e->indirect_info);
999
1000 /* Clear out the edge so we do not dangle pointers. */
1001 memset (e, 0, sizeof (*e));
1002 e->uid = uid;
1003 NEXT_FREE_EDGE (e) = free_edges;
1004 free_edges = e;
1005 edges_count--;
1006 }
1007
1008 /* Remove the edge in the cgraph. */
1009
1010 void
1011 cgraph_edge::remove (void)
1012 {
1013 /* Call all edge removal hooks. */
1014 symtab->call_edge_removal_hooks (this);
1015
1016 if (!indirect_unknown_callee)
1017 /* Remove from callers list of the callee. */
1018 remove_callee ();
1019
1020 /* Remove from callees list of the callers. */
1021 remove_caller ();
1022
1023 /* Put the edge onto the free list. */
1024 symtab->free_edge (this);
1025 }
1026
1027 /* Turn edge into speculative call calling N2. Update
1028 the profile so the direct call is taken DIRECT_COUNT times
1029 with DIRECT_FREQUENCY.
1030
1031 At clone materialization time, the indirect call E will
1032 be expanded as:
1033
1034 if (call_dest == N2)
1035 n2 ();
1036 else
1037 call call_dest
1038
1039 At this time the function just creates the direct call,
1040 the reference representing the if conditional, and attaches
1041 them all to the original indirect call statement.
1042
1043 Return the direct edge created. */
1044
1045 cgraph_edge *
1046 cgraph_edge::make_speculative (cgraph_node *n2, gcov_type direct_count,
1047 int direct_frequency)
1048 {
1049 cgraph_node *n = caller;
1050 ipa_ref *ref = NULL;
1051 cgraph_edge *e2;
1052
1053 if (dump_file)
1054 {
1055 fprintf (dump_file, "Indirect call -> speculative call"
1056 " %s/%i => %s/%i\n",
1057 xstrdup_for_dump (n->name ()), n->order,
1058 xstrdup_for_dump (n2->name ()), n2->order);
1059 }
1060 speculative = true;
1061 e2 = n->create_edge (n2, call_stmt, direct_count, direct_frequency);
1062 initialize_inline_failed (e2);
1063 e2->speculative = true;
1064 if (TREE_NOTHROW (n2->decl))
1065 e2->can_throw_external = false;
1066 else
1067 e2->can_throw_external = can_throw_external;
1068 e2->lto_stmt_uid = lto_stmt_uid;
1069 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1070 count -= e2->count;
1071 frequency -= e2->frequency;
1072 symtab->call_edge_duplication_hooks (this, e2);
1073 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1074 ref->lto_stmt_uid = lto_stmt_uid;
1075 ref->speculative = speculative;
1076 n2->mark_address_taken ();
1077 return e2;
1078 }
1079
1080 /* A speculative call consists of three components:
1081 1) an indirect edge representing the original call,
1082 2) a direct edge representing the new call,
1083 3) an ADDR_EXPR reference representing the speculative check.
1084 All three components are attached to a single statement (the indirect
1085 call), and if one of them exists, all of them must exist.
1086
1087 Given a speculative call edge, return all three components.
1088 */
1089
1090 void
1091 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1092 cgraph_edge *&indirect,
1093 ipa_ref *&reference)
1094 {
1095 ipa_ref *ref;
1096 int i;
1097 cgraph_edge *e2;
1098 cgraph_edge *e = this;
1099
1100 if (!e->indirect_unknown_callee)
1101 for (e2 = e->caller->indirect_calls;
1102 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1103 e2 = e2->next_callee)
1104 ;
1105 else
1106 {
1107 e2 = e;
1108 /* We can take advantage of the call stmt hash. */
1109 if (e2->call_stmt)
1110 {
1111 e = e->caller->get_edge (e2->call_stmt);
1112 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1113 }
1114 else
1115 for (e = e->caller->callees;
1116 e2->call_stmt != e->call_stmt
1117 || e2->lto_stmt_uid != e->lto_stmt_uid;
1118 e = e->next_callee)
1119 ;
1120 }
1121 gcc_assert (e->speculative && e2->speculative);
1122 direct = e;
1123 indirect = e2;
1124
1125 reference = NULL;
1126 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1127 if (ref->speculative
1128 && ((ref->stmt && ref->stmt == e->call_stmt)
1129 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1130 {
1131 reference = ref;
1132 break;
1133 }
1134
1135 /* A speculative edge always consists of all three components: the direct edge,
1136 the indirect edge and the reference. */
1137
1138 gcc_assert (e && e2 && ref);
1139 }
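
/* Editorial usage sketch, not part of the original sources: given any one
   component of a speculative call, the other two are recovered before the
   group is updated or torn down; EDGE stands for whichever speculative edge
   the caller holds.

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     edge->speculative_call_info (direct, indirect, ref);
*/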
1140
1141 /* The speculative call edge turned out to be a direct call to CALLEE_DECL.
1142 Remove the speculative call sequence and return the edge representing the call.
1143 It is up to the caller to redirect the call as appropriate. */
1144
1145 cgraph_edge *
1146 cgraph_edge::resolve_speculation (tree callee_decl)
1147 {
1148 cgraph_edge *edge = this;
1149 cgraph_edge *e2;
1150 ipa_ref *ref;
1151
1152 gcc_assert (edge->speculative);
1153 edge->speculative_call_info (e2, edge, ref);
1154 if (!callee_decl
1155 || !ref->referred->semantically_equivalent_p
1156 (symtab_node::get (callee_decl)))
1157 {
1158 if (dump_file)
1159 {
1160 if (callee_decl)
1161 {
1162 fprintf (dump_file, "Speculative indirect call %s/%i => %s/%i has "
1163 "turned out to have contradicting known target ",
1164 xstrdup_for_dump (edge->caller->name ()),
1165 edge->caller->order,
1166 xstrdup_for_dump (e2->callee->name ()),
1167 e2->callee->order);
1168 print_generic_expr (dump_file, callee_decl, 0);
1169 fprintf (dump_file, "\n");
1170 }
1171 else
1172 {
1173 fprintf (dump_file, "Removing speculative call %s/%i => %s/%i\n",
1174 xstrdup_for_dump (edge->caller->name ()),
1175 edge->caller->order,
1176 xstrdup_for_dump (e2->callee->name ()),
1177 e2->callee->order);
1178 }
1179 }
1180 }
1181 else
1182 {
1183 cgraph_edge *tmp = edge;
1184 if (dump_file)
1185 fprintf (dump_file, "Speculative call turned into direct call.\n");
1186 edge = e2;
1187 e2 = tmp;
1188 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1189 in the functions inlined through it. */
1190 }
1191 edge->count += e2->count;
1192 edge->frequency += e2->frequency;
1193 if (edge->frequency > CGRAPH_FREQ_MAX)
1194 edge->frequency = CGRAPH_FREQ_MAX;
1195 edge->speculative = false;
1196 e2->speculative = false;
1197 ref->remove_reference ();
1198 if (e2->indirect_unknown_callee || e2->inline_failed)
1199 e2->remove ();
1200 else
1201 e2->callee->remove_symbol_and_inline_clones ();
1202 if (edge->caller->call_site_hash)
1203 cgraph_update_edge_in_call_site_hash (edge);
1204 return edge;
1205 }
1206
1207 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1208 CALLEE. */
1210
1211 cgraph_edge *
1212 cgraph_edge::make_direct (cgraph_node *callee)
1213 {
1214 cgraph_edge *edge = this;
1215 gcc_assert (indirect_unknown_callee);
1216
1217 /* If we are redirecting speculative call, make it non-speculative. */
1218 if (indirect_unknown_callee && speculative)
1219 {
1220 edge = edge->resolve_speculation (callee->decl);
1221
1222 /* On successful speculation just return the pre-existing direct edge. */
1223 if (!indirect_unknown_callee)
1224 return edge;
1225 }
1226
1227 indirect_unknown_callee = 0;
1228 ggc_free (indirect_info);
1229 indirect_info = NULL;
1230
1231 /* Get the edge out of the indirect edge list. */
1232 if (prev_callee)
1233 prev_callee->next_callee = next_callee;
1234 if (next_callee)
1235 next_callee->prev_callee = prev_callee;
1236 if (!prev_callee)
1237 caller->indirect_calls = next_callee;
1238
1239 /* Put it into the normal callee list. */
1240 prev_callee = NULL;
1241 next_callee = caller->callees;
1242 if (caller->callees)
1243 caller->callees->prev_callee = edge;
1244 caller->callees = edge;
1245
1246 /* Insert to callers list of the new callee. */
1247 edge->set_callee (callee);
1248
1249 if (call_stmt
1250 && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1251 {
1252 call_stmt_cannot_inline_p = true;
1253 inline_failed = CIF_MISMATCHED_ARGUMENTS;
1254 }
1255
1256 /* We need to re-determine the inlining status of the edge. */
1257 initialize_inline_failed (edge);
1258 return edge;
1259 }
1260
1261 /* If necessary, change the function declaration in the call statement
1262 associated with E so that it corresponds to the edge callee. */
1263
1264 gimple *
1265 cgraph_edge::redirect_call_stmt_to_callee (void)
1266 {
1267 cgraph_edge *e = this;
1268
1269 tree decl = gimple_call_fndecl (e->call_stmt);
1270 tree lhs = gimple_call_lhs (e->call_stmt);
1271 gcall *new_stmt;
1272 gimple_stmt_iterator gsi;
1273 bool skip_bounds = false;
1274
1275 if (e->speculative)
1276 {
1277 cgraph_edge *e2;
1278 gcall *new_stmt;
1279 ipa_ref *ref;
1280
1281 e->speculative_call_info (e, e2, ref);
1282 /* If there already is a direct call (i.e. as a result of the inliner's
1283 substitution), forget about speculating. */
1284 if (decl)
1285 e = e->resolve_speculation (decl);
1286 /* If types do not match, speculation was likely wrong.
1287 The direct edge was possibly redirected to the clone with a different
1288 signature. We did not update the call statement yet, so compare it
1289 with the reference that still points to the proper type. */
1290 else if (!gimple_check_call_matching_types (e->call_stmt,
1291 ref->referred->decl,
1292 true))
1293 {
1294 if (dump_file)
1295 fprintf (dump_file, "Not expanding speculative call of %s/%i -> %s/%i\n"
1296 "Type mismatch.\n",
1297 xstrdup_for_dump (e->caller->name ()),
1298 e->caller->order,
1299 xstrdup_for_dump (e->callee->name ()),
1300 e->callee->order);
1301 e = e->resolve_speculation ();
1302 /* We are producing the final function body and will throw away the
1303 callgraph edges really soon. Reset the counts/frequencies to
1304 keep the verifier happy in the case of roundoff errors. */
1305 e->count = gimple_bb (e->call_stmt)->count;
1306 e->frequency = compute_call_stmt_bb_frequency
1307 (e->caller->decl, gimple_bb (e->call_stmt));
1308 }
1309 /* Expand speculation into GIMPLE code. */
1310 else
1311 {
1312 if (dump_file)
1313 fprintf (dump_file,
1314 "Expanding speculative call of %s/%i -> %s/%i count:"
1315 "%" PRId64"\n",
1316 xstrdup_for_dump (e->caller->name ()),
1317 e->caller->order,
1318 xstrdup_for_dump (e->callee->name ()),
1319 e->callee->order,
1320 (int64_t)e->count);
1321 gcc_assert (e2->speculative);
1322 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1323 new_stmt = gimple_ic (e->call_stmt,
1324 dyn_cast<cgraph_node *> (ref->referred),
1325 e->count || e2->count
1326 ? RDIV (e->count * REG_BR_PROB_BASE,
1327 e->count + e2->count)
1328 : e->frequency || e2->frequency
1329 ? RDIV (e->frequency * REG_BR_PROB_BASE,
1330 e->frequency + e2->frequency)
1331 : REG_BR_PROB_BASE / 2,
1332 e->count, e->count + e2->count);
1333 e->speculative = false;
1334 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1335 false);
1336
1337 /* Fix edges for BUILT_IN_CHKP_BNDRET calls attached to the
1338 processed call stmt. */
1339 if (gimple_call_with_bounds_p (new_stmt)
1340 && gimple_call_lhs (new_stmt)
1341 && chkp_retbnd_call_by_val (gimple_call_lhs (e2->call_stmt)))
1342 {
1343 tree dresult = gimple_call_lhs (new_stmt);
1344 tree iresult = gimple_call_lhs (e2->call_stmt);
1345 gcall *dbndret = chkp_retbnd_call_by_val (dresult);
1346 gcall *ibndret = chkp_retbnd_call_by_val (iresult);
1347 struct cgraph_edge *iedge
1348 = e2->caller->cgraph_node::get_edge (ibndret);
1349 struct cgraph_edge *dedge;
1350
1351 if (dbndret)
1352 {
1353 dedge = iedge->caller->create_edge (iedge->callee,
1354 dbndret, e->count,
1355 e->frequency);
1356 dedge->frequency = compute_call_stmt_bb_frequency
1357 (dedge->caller->decl, gimple_bb (dedge->call_stmt));
1358 }
1359 iedge->frequency = compute_call_stmt_bb_frequency
1360 (iedge->caller->decl, gimple_bb (iedge->call_stmt));
1361 }
1362
1363 e->frequency = compute_call_stmt_bb_frequency
1364 (e->caller->decl, gimple_bb (e->call_stmt));
1365 e2->frequency = compute_call_stmt_bb_frequency
1366 (e2->caller->decl, gimple_bb (e2->call_stmt));
1367 e2->speculative = false;
1368 ref->speculative = false;
1369 ref->stmt = NULL;
1370 /* Only one of the two speculative edges is kept in the call site hash;
1371 make sure it gets updated. */
1372 if (e->caller->call_site_hash)
1373 cgraph_update_edge_in_call_site_hash (e2);
1374 pop_cfun ();
1375 /* Continue redirecting E to proper target. */
1376 }
1377 }
1378
1379 /* We might propagate an instrumented function pointer into
1380 a non-instrumented function and vice versa. In such a
1381 case we need to either fix the function declaration or
1382 remove bounds from the call statement. */
1383 if (flag_check_pointer_bounds && e->callee)
1384 skip_bounds = chkp_redirect_edge (e);
1385
1386 if (e->indirect_unknown_callee
1387 || (decl == e->callee->decl
1388 && !skip_bounds))
1389 return e->call_stmt;
1390
1391 if (flag_checking && decl)
1392 {
1393 cgraph_node *node = cgraph_node::get (decl);
1394 gcc_assert (!node || !node->clone.combined_args_to_skip);
1395 }
1396
1397 if (symtab->dump_file)
1398 {
1399 fprintf (symtab->dump_file, "updating call of %s/%i -> %s/%i: ",
1400 xstrdup_for_dump (e->caller->name ()), e->caller->order,
1401 xstrdup_for_dump (e->callee->name ()), e->callee->order);
1402 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1403 if (e->callee->clone.combined_args_to_skip)
1404 {
1405 fprintf (symtab->dump_file, " combined args to skip: ");
1406 dump_bitmap (symtab->dump_file,
1407 e->callee->clone.combined_args_to_skip);
1408 }
1409 }
1410
1411 if (e->callee->clone.combined_args_to_skip
1412 || skip_bounds)
1413 {
1414 int lp_nr;
1415
1416 new_stmt = e->call_stmt;
1417 if (e->callee->clone.combined_args_to_skip)
1418 new_stmt
1419 = gimple_call_copy_skip_args (new_stmt,
1420 e->callee->clone.combined_args_to_skip);
1421 if (skip_bounds)
1422 new_stmt = chkp_copy_call_skip_bounds (new_stmt);
1423
1424 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1425 gimple_call_set_fntype (new_stmt, gimple_call_fntype (e->call_stmt));
1426
1427 if (gimple_vdef (new_stmt)
1428 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1429 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1430
1431 gsi = gsi_for_stmt (e->call_stmt);
1432
1433 /* For optimized away parameters, add on the caller side
1434 before the call
1435 DEBUG D#X => parm_Y(D)
1436 stmts and associate D#X with parm in decl_debug_args_lookup
1437 vector to say for debug info that if parameter parm had been passed,
1438 it would have value parm_Y(D). */
1439 if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_STMTS)
1440 {
1441 vec<tree, va_gc> **debug_args
1442 = decl_debug_args_lookup (e->callee->decl);
1443 tree old_decl = gimple_call_fndecl (e->call_stmt);
1444 if (debug_args && old_decl)
1445 {
1446 tree parm;
1447 unsigned i = 0, num;
1448 unsigned len = vec_safe_length (*debug_args);
1449 unsigned nargs = gimple_call_num_args (e->call_stmt);
1450 for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1451 parm && num < nargs;
1452 parm = DECL_CHAIN (parm), num++)
1453 if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1454 && is_gimple_reg (parm))
1455 {
1456 unsigned last = i;
1457
1458 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1459 i += 2;
1460 if (i >= len)
1461 {
1462 i = 0;
1463 while (i < last
1464 && (**debug_args)[i] != DECL_ORIGIN (parm))
1465 i += 2;
1466 if (i >= last)
1467 continue;
1468 }
1469 tree ddecl = (**debug_args)[i + 1];
1470 tree arg = gimple_call_arg (e->call_stmt, num);
1471 if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1472 TREE_TYPE (arg)))
1473 {
1474 tree rhs1;
1475 if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1476 continue;
1477 if (TREE_CODE (arg) == SSA_NAME
1478 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1479 && (rhs1
1480 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1481 && useless_type_conversion_p (TREE_TYPE (ddecl),
1482 TREE_TYPE (rhs1)))
1483 arg = rhs1;
1484 else
1485 arg = fold_convert (TREE_TYPE (ddecl), arg);
1486 }
1487
1488 gimple *def_temp
1489 = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1490 e->call_stmt);
1491 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1492 }
1493 }
1494 }
1495
1496 gsi_replace (&gsi, new_stmt, false);
1497 /* We need to defer cleaning EH info on the new statement to
1498 fixup-cfg. We may not have dominator information at this point
1499 and thus would end up with unreachable blocks and have no way
1500 to communicate that we need to run CFG cleanup then. */
1501 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1502 if (lp_nr != 0)
1503 {
1504 remove_stmt_from_eh_lp (e->call_stmt);
1505 add_stmt_to_eh_lp (new_stmt, lp_nr);
1506 }
1507 }
1508 else
1509 {
1510 new_stmt = e->call_stmt;
1511 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1512 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1513 }
1514
1515 /* If the call becomes noreturn, remove the LHS if possible. */
1516 if (gimple_call_noreturn_p (new_stmt) && should_remove_lhs_p (lhs))
1517 {
1518 if (TREE_CODE (lhs) == SSA_NAME)
1519 {
1520 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1521 TREE_TYPE (lhs), NULL);
1522 var = get_or_create_ssa_default_def
1523 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1524 gimple *set_stmt = gimple_build_assign (lhs, var);
1525 gsi = gsi_for_stmt (new_stmt);
1526 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1527 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1528 }
1529 gimple_call_set_lhs (new_stmt, NULL_TREE);
1530 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1531 }
1532
1533 /* If new callee has no static chain, remove it. */
1534 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1535 {
1536 gimple_call_set_chain (new_stmt, NULL);
1537 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1538 }
1539
1540 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1541 new_stmt);
1542
1543 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1544
1545 if (symtab->dump_file)
1546 {
1547 fprintf (symtab->dump_file, " updated to:");
1548 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1549 }
1550 return new_stmt;
1551 }
1552
1553 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1554 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1555 of OLD_STMT if it was previously a call statement.
1556 If NEW_STMT is NULL, the call has been dropped without any
1557 replacement. */
1558
1559 static void
1560 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1561 gimple *old_stmt, tree old_call,
1562 gimple *new_stmt)
1563 {
1564 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1565 ? gimple_call_fndecl (new_stmt) : 0;
1566
1567 /* If we are only seeing indirect calls, there is nothing to update. */
1568 if (!new_call && !old_call)
1569 return;
1570 /* See if we turned an indirect call into a direct call, or folded a call
1571 to one builtin into a call to a different builtin. */
1572 if (old_call != new_call)
1573 {
1574 cgraph_edge *e = node->get_edge (old_stmt);
1575 cgraph_edge *ne = NULL;
1576 gcov_type count;
1577 int frequency;
1578
1579 if (e)
1580 {
1581 /* Keep calls marked as dead dead. */
1582 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1583 && DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
1584 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
1585 {
1586 node->get_edge (old_stmt)->set_call_stmt
1587 (as_a <gcall *> (new_stmt));
1588 return;
1589 }
1590 /* See if the edge is already there and has the correct callee. It
1591 might be so because indirect inlining has already updated
1592 it. We also might have cloned and redirected the edge. */
1593 if (new_call && e->callee)
1594 {
1595 cgraph_node *callee = e->callee;
1596 while (callee)
1597 {
1598 if (callee->decl == new_call
1599 || callee->former_clone_of == new_call)
1600 {
1601 e->set_call_stmt (as_a <gcall *> (new_stmt));
1602 return;
1603 }
1604 callee = callee->clone_of;
1605 }
1606 }
1607
1608 /* Otherwise remove the edge and create a new one; we cannot simply redirect
1609 it since the function has changed, so the inline plan and other information
1610 attached to the edge are invalid. */
1611 count = e->count;
1612 frequency = e->frequency;
1613 if (e->indirect_unknown_callee || e->inline_failed)
1614 e->remove ();
1615 else
1616 e->callee->remove_symbol_and_inline_clones ();
1617 }
1618 else if (new_call)
1619 {
1620 /* We are seeing new direct call; compute profile info based on BB. */
1621 basic_block bb = gimple_bb (new_stmt);
1622 count = bb->count;
1623 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1624 bb);
1625 }
1626
1627 if (new_call)
1628 {
1629 ne = node->create_edge (cgraph_node::get_create (new_call),
1630 as_a <gcall *> (new_stmt), count,
1631 frequency);
1632 gcc_assert (ne->inline_failed);
1633 }
1634 }
1635 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1636 else if (old_stmt != new_stmt)
1637 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1638 }
1639
1640 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1641 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1642 of OLD_STMT before it was updated (updating can happen in place). */
1643
1644 void
1645 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1646 gimple *new_stmt)
1647 {
1648 cgraph_node *orig = cgraph_node::get (cfun->decl);
1649 cgraph_node *node;
1650
1651 gcc_checking_assert (orig);
1652 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1653 if (orig->clones)
1654 for (node = orig->clones; node != orig;)
1655 {
1656 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1657 if (node->clones)
1658 node = node->clones;
1659 else if (node->next_sibling_clone)
1660 node = node->next_sibling_clone;
1661 else
1662 {
1663 while (node != orig && !node->next_sibling_clone)
1664 node = node->clone_of;
1665 if (node != orig)
1666 node = node->next_sibling_clone;
1667 }
1668 }
1669 }
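
/* Editorial note, not part of the original sources: the loop above is the
   usual preorder walk over a clone tree. It descends into NODE->clones
   first, then moves to NODE->next_sibling_clone, and climbs back up through
   NODE->clone_of once a subtree is exhausted, so every clone of ORIG is
   visited exactly once. */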
1670
1671
1672 /* Remove all callees from the node. */
1673
1674 void
1675 cgraph_node::remove_callees (void)
1676 {
1677 cgraph_edge *e, *f;
1678
1679 /* It is sufficient to remove the edges from the lists of callers of
1680 the callees. The callee list of the node can be zapped with one
1681 assignment. */
1682 for (e = callees; e; e = f)
1683 {
1684 f = e->next_callee;
1685 symtab->call_edge_removal_hooks (e);
1686 if (!e->indirect_unknown_callee)
1687 e->remove_callee ();
1688 symtab->free_edge (e);
1689 }
1690 for (e = indirect_calls; e; e = f)
1691 {
1692 f = e->next_callee;
1693 symtab->call_edge_removal_hooks (e);
1694 if (!e->indirect_unknown_callee)
1695 e->remove_callee ();
1696 symtab->free_edge (e);
1697 }
1698 indirect_calls = NULL;
1699 callees = NULL;
1700 if (call_site_hash)
1701 {
1702 call_site_hash->empty ();
1703 call_site_hash = NULL;
1704 }
1705 }
1706
1707 /* Remove all callers from the node. */
1708
1709 void
1710 cgraph_node::remove_callers (void)
1711 {
1712 cgraph_edge *e, *f;
1713
1714 /* It is sufficient to remove the edges from the lists of callees of
1715 the callers. The caller list of the node can be zapped with one
1716 assignment. */
1717 for (e = callers; e; e = f)
1718 {
1719 f = e->next_caller;
1720 symtab->call_edge_removal_hooks (e);
1721 e->remove_caller ();
1722 symtab->free_edge (e);
1723 }
1724 callers = NULL;
1725 }
1726
1727 /* Helper function for cgraph_release_function_body and free_lang_data.
1728 It releases body from function DECL without having to inspect its
1729 possibly non-existent symtab node. */
1730
1731 void
1732 release_function_body (tree decl)
1733 {
1734 function *fn = DECL_STRUCT_FUNCTION (decl);
1735 if (fn)
1736 {
1737 if (fn->cfg
1738 && loops_for_fn (fn))
1739 {
1740 fn->curr_properties &= ~PROP_loops;
1741 loop_optimizer_finalize (fn);
1742 }
1743 if (fn->gimple_df)
1744 {
1745 delete_tree_ssa (fn);
1746 fn->eh = NULL;
1747 }
1748 if (fn->cfg)
1749 {
1750 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1751 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1752 delete_tree_cfg_annotations (fn);
1753 clear_edges (fn);
1754 fn->cfg = NULL;
1755 }
1756 if (fn->value_histograms)
1757 free_histograms (fn);
1758 gimple_set_body (decl, NULL);
1759 /* The struct function hangs a lot of data off it that would leak if we
1760 did not remove all pointers to it. */
1761 ggc_free (fn);
1762 DECL_STRUCT_FUNCTION (decl) = NULL;
1763 }
1764 DECL_SAVED_TREE (decl) = NULL;
1765 }
1766
1767 /* Release memory used to represent the body of a function.
1768 Use this only for functions that are released before being translated to
1769 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1770 are freed in final.c via free_after_compilation ().
1771 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1772
1773 void
1774 cgraph_node::release_body (bool keep_arguments)
1775 {
1776 ipa_transforms_to_apply.release ();
1777 if (!used_as_abstract_origin && symtab->state != PARSING)
1778 {
1779 DECL_RESULT (decl) = NULL;
1780
1781 if (!keep_arguments)
1782 DECL_ARGUMENTS (decl) = NULL;
1783 }
1784 /* If the node is abstract and needed, then do not clear
1785 DECL_INITIAL of its associated function declaration because it's
1786 needed to emit debug info later. */
1787 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1788 DECL_INITIAL (decl) = error_mark_node;
1789 release_function_body (decl);
1790 if (lto_file_data)
1791 {
1792 lto_free_function_in_decl_state_for_node (this);
1793 lto_file_data = NULL;
1794 }
1795 }
1796
1797 /* Remove function from symbol table. */
1798
1799 void
1800 cgraph_node::remove (void)
1801 {
1802 cgraph_node *n;
1803 int uid = this->uid;
1804
1805 symtab->call_cgraph_removal_hooks (this);
1806 remove_callers ();
1807 remove_callees ();
1808 ipa_transforms_to_apply.release ();
1809
1810 /* Incremental inlining accesses removed nodes stored in the
1811 postorder list. */
1812 force_output = false;
1813 forced_by_abi = false;
1814 for (n = nested; n; n = n->next_nested)
1815 n->origin = NULL;
1816 nested = NULL;
1817 if (origin)
1818 {
1819 cgraph_node **node2 = &origin->nested;
1820
1821 while (*node2 != this)
1822 node2 = &(*node2)->next_nested;
1823 *node2 = next_nested;
1824 }
1825 unregister ();
1826 if (prev_sibling_clone)
1827 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1828 else if (clone_of)
1829 clone_of->clones = next_sibling_clone;
1830 if (next_sibling_clone)
1831 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1832 if (clones)
1833 {
1834 cgraph_node *n, *next;
1835
1836 if (clone_of)
1837 {
1838 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1839 n->clone_of = clone_of;
1840 n->clone_of = clone_of;
1841 n->next_sibling_clone = clone_of->clones;
1842 if (clone_of->clones)
1843 clone_of->clones->prev_sibling_clone = n;
1844 clone_of->clones = clones;
1845 }
1846 else
1847 {
1848 /* We are removing a node with clones. This makes the clones inconsistent,
1849 but assume they will be removed subsequently and just keep the clone
1850 tree intact. This can happen during unreachable function removal, since
1851 we remove unreachable functions in a random order rather than by a
1852 bottom-up walk of the clone trees. */
1853 for (n = clones; n; n = next)
1854 {
1855 next = n->next_sibling_clone;
1856 n->next_sibling_clone = NULL;
1857 n->prev_sibling_clone = NULL;
1858 n->clone_of = NULL;
1859 }
1860 }
1861 }
1862
1863 /* While all the clones are removed after being processed, the function
1864 itself is kept in the cgraph even after it is compiled. Check whether
1865 we are done with this body and reclaim it proactively if this is the case.
1866 */
1867 if (symtab->state != LTO_STREAMING)
1868 {
1869 n = cgraph_node::get (decl);
1870 if (!n
1871 || (!n->clones && !n->clone_of && !n->global.inlined_to
1872 && ((symtab->global_info_ready || in_lto_p)
1873 && (TREE_ASM_WRITTEN (n->decl)
1874 || DECL_EXTERNAL (n->decl)
1875 || !n->analyzed
1876 || (!flag_wpa && n->in_other_partition)))))
1877 release_body ();
1878 }
1879 else
1880 {
1881 lto_free_function_in_decl_state_for_node (this);
1882 lto_file_data = NULL;
1883 }
1884
1885 decl = NULL;
1886 if (call_site_hash)
1887 {
1888 call_site_hash->empty ();
1889 call_site_hash = NULL;
1890 }
1891
1892 if (instrumented_version)
1893 {
1894 instrumented_version->instrumented_version = NULL;
1895 instrumented_version = NULL;
1896 }
1897
1898 symtab->release_symbol (this, uid);
1899 }
1900
1901 /* Indicate that the node has its address taken. */
1902
1903 void
1904 cgraph_node::mark_address_taken (void)
1905 {
1906 /* Indirect inlining can figure out that all uses of the address are
1907 inlined. */
1908 if (global.inlined_to)
1909 {
1910 gcc_assert (cfun->after_inlining);
1911 gcc_assert (callers->indirect_inlining_edge);
1912 return;
1913 }
1914 /* FIXME: The address_taken flag is used both as a shortcut for testing
1915 whether an IPA_REF_ADDR reference exists (and thus it should be set on
1916 the node representing the alias we take the address of) and as a test
1917 whether the address of the object was taken (and thus it should be set
1918 on the node the alias is referring to). We should remove the first use
1919 and then remove the following set. */
1920 address_taken = 1;
1921 cgraph_node *node = ultimate_alias_target ();
1922 node->address_taken = 1;
1923 }
1924
1925 /* Return local info for the compiled function. */
1926
1927 cgraph_local_info *
1928 cgraph_node::local_info (tree decl)
1929 {
1930 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1931 cgraph_node *node = get (decl);
1932 if (!node)
1933 return NULL;
1934 return &node->ultimate_alias_target ()->local;
1935 }
1936
1937 /* Return RTL info for the compiled function. */
1938
1939 cgraph_rtl_info *
1940 cgraph_node::rtl_info (tree decl)
1941 {
1942 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1943 cgraph_node *node = get (decl);
1944 if (!node)
1945 return NULL;
1946 node = node->ultimate_alias_target ();
1947 if (node->decl != current_function_decl
1948 && !TREE_ASM_WRITTEN (node->decl))
1949 return NULL;
1950 /* Allocate it if it doesn't exist. */
1951 if (node->ultimate_alias_target ()->rtl == NULL)
1952 node->ultimate_alias_target ()->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1953 return node->ultimate_alias_target ()->rtl;
1954 }
1955
1956 /* Return a string describing the failure REASON. */
1957
1958 const char*
1959 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1960 {
1961 #undef DEFCIFCODE
1962 #define DEFCIFCODE(code, type, string) string,
1963
1964 static const char *cif_string_table[CIF_N_REASONS] = {
1965 #include "cif-code.def"
1966 };
1967
1968 /* Signedness of an enum type is implementation defined, so cast it
1969 to unsigned before testing. */
1970 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1971 return cif_string_table[reason];
1972 }
1973
1974 /* Return a type describing the failure REASON. */
1975
1976 cgraph_inline_failed_type_t
1977 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1978 {
1979 #undef DEFCIFCODE
1980 #define DEFCIFCODE(code, type, string) type,
1981
1982 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1983 #include "cif-code.def"
1984 };
1985
1986 /* Signedness of an enum type is implementation defined, so cast it
1987 to unsigned before testing. */
1988 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1989 return cif_type_table[reason];
1990 }
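
/* Both lookup tables above are built by the same X-macro trick: cif-code.def
   is included twice with DEFCIFCODE redefined, once expanding each entry to
   its message string and once to its failure type. A hypothetical entry of
   roughly this shape (shown only for illustration; the authoritative entries
   live in cif-code.def):

     DEFCIFCODE (FUNCTION_NOT_CONSIDERED, CIF_FINAL_NORMAL,
                 N_("function not considered for inlining"))

   contributes the string to cif_string_table and the type to cif_type_table,
   both indexed by the corresponding CIF_* enumerator.  */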
1991
1992 /* Names used to print out the availability enum. */
1993 const char * const cgraph_availability_names[] =
1994 {"unset", "not_available", "overwritable", "available", "local"};
1995
1996 /* Output flags of edge to a file F. */
1997
1998 void
1999 cgraph_edge::dump_edge_flags (FILE *f)
2000 {
2001 if (speculative)
2002 fprintf (f, "(speculative) ");
2003 if (!inline_failed)
2004 fprintf (f, "(inlined) ");
2005 if (call_stmt_cannot_inline_p)
2006 fprintf (f, "(call_stmt_cannot_inline_p) ");
2007 if (indirect_inlining_edge)
2008 fprintf (f, "(indirect_inlining) ");
2009 if (count)
2010 fprintf (f, "(%" PRId64"x) ", (int64_t)count);
2011 if (frequency)
2012 fprintf (f, "(%.2f per call) ", frequency / (double)CGRAPH_FREQ_BASE);
2013 if (can_throw_external)
2014 fprintf (f, "(can throw external) ");
2015 }
2016
2017 /* Dump call graph node to file F. */
2018
2019 void
2020 cgraph_node::dump (FILE *f)
2021 {
2022 cgraph_edge *edge;
2023
2024 dump_base (f);
2025
2026 if (global.inlined_to)
2027 fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
2028 xstrdup_for_dump (name ()),
2029 order,
2030 xstrdup_for_dump (global.inlined_to->name ()),
2031 global.inlined_to->order);
2032 if (clone_of)
2033 fprintf (f, " Clone of %s/%i\n",
2034 clone_of->asm_name (),
2035 clone_of->order);
2036 if (symtab->function_flags_ready)
2037 fprintf (f, " Availability: %s\n",
2038 cgraph_availability_names [get_availability ()]);
2039
2040 if (profile_id)
2041 fprintf (f, " Profile id: %i\n",
2042 profile_id);
2043 fprintf (f, " First run: %i\n", tp_first_run);
2044 fprintf (f, " Function flags:");
2045 if (count)
2046 fprintf (f, " executed %" PRId64"x",
2047 (int64_t)count);
2048 if (origin)
2049 fprintf (f, " nested in: %s", origin->asm_name ());
2050 if (gimple_has_body_p (decl))
2051 fprintf (f, " body");
2052 if (process)
2053 fprintf (f, " process");
2054 if (local.local)
2055 fprintf (f, " local");
2056 if (local.redefined_extern_inline)
2057 fprintf (f, " redefined_extern_inline");
2058 if (only_called_at_startup)
2059 fprintf (f, " only_called_at_startup");
2060 if (only_called_at_exit)
2061 fprintf (f, " only_called_at_exit");
2062 if (tm_clone)
2063 fprintf (f, " tm_clone");
2064 if (icf_merged)
2065 fprintf (f, " icf_merged");
2066 if (merged_comdat)
2067 fprintf (f, " merged_comdat");
2068 if (split_part)
2069 fprintf (f, " split_part");
2070 if (indirect_call_target)
2071 fprintf (f, " indirect_call_target");
2072 if (nonfreeing_fn)
2073 fprintf (f, " nonfreeing_fn");
2074 if (DECL_STATIC_CONSTRUCTOR (decl))
2075 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2076 if (DECL_STATIC_DESTRUCTOR (decl))
2077 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2078 if (frequency == NODE_FREQUENCY_HOT)
2079 fprintf (f, " hot");
2080 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2081 fprintf (f, " unlikely_executed");
2082 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2083 fprintf (f, " executed_once");
2088 if (opt_for_fn (decl, optimize_size))
2089 fprintf (f, " optimize_size");
2090 if (parallelized_function)
2091 fprintf (f, " parallelized_function");
2092
2093 fprintf (f, "\n");
2094
2095 if (thunk.thunk_p)
2096 {
2097 fprintf (f, " Thunk");
2098 if (thunk.alias)
2099 fprintf (f, " of %s (asm: %s)",
2100 lang_hooks.decl_printable_name (thunk.alias, 2),
2101 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2102 fprintf (f, " fixed offset %i virtual value %i has "
2103 "virtual offset %i\n",
2104 (int)thunk.fixed_offset,
2105 (int)thunk.virtual_value,
2106 (int)thunk.virtual_offset_p);
2107 }
2108 if (alias && thunk.alias
2109 && DECL_P (thunk.alias))
2110 {
2111 fprintf (f, " Alias of %s",
2112 lang_hooks.decl_printable_name (thunk.alias, 2));
2113 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2114 fprintf (f, " (asm: %s)",
2115 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2116 fprintf (f, "\n");
2117 }
2118
2119 fprintf (f, " Called by: ");
2120
2121 for (edge = callers; edge; edge = edge->next_caller)
2122 {
2123 fprintf (f, "%s/%i ", edge->caller->asm_name (),
2124 edge->caller->order);
2125 edge->dump_edge_flags (f);
2126 }
2127
2128 fprintf (f, "\n Calls: ");
2129 for (edge = callees; edge; edge = edge->next_callee)
2130 {
2131 fprintf (f, "%s/%i ", edge->callee->asm_name (),
2132 edge->callee->order);
2133 edge->dump_edge_flags (f);
2134 }
2135 fprintf (f, "\n");
2136
2137 for (edge = indirect_calls; edge; edge = edge->next_callee)
2138 {
2139 if (edge->indirect_info->polymorphic)
2140 {
2141 fprintf (f, " Polymorphic indirect call of type ");
2142 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2143 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2144 }
2145 else
2146 fprintf (f, " Indirect call");
2147 edge->dump_edge_flags (f);
2148 if (edge->indirect_info->param_index != -1)
2149 {
2150 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2151 if (edge->indirect_info->agg_contents)
2152 fprintf (f, " loaded from %s %s at offset %i",
2153 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2154 edge->indirect_info->by_ref ? "passed by reference":"",
2155 (int)edge->indirect_info->offset);
2156 if (edge->indirect_info->vptr_changed)
2157 fprintf (f, " (vptr maybe changed)");
2158 }
2159 fprintf (f, "\n");
2160 if (edge->indirect_info->polymorphic)
2161 edge->indirect_info->context.dump (f);
2162 }
2163
2164 if (instrumentation_clone)
2165 fprintf (f, " Is instrumented version.\n");
2166 else if (instrumented_version)
2167 fprintf (f, " Has instrumented version.\n");
2168 }
2169
2170 /* Dump call graph node NODE to stderr. */
2171
2172 DEBUG_FUNCTION void
2173 cgraph_node::debug (void)
2174 {
2175 dump (stderr);
2176 }
2177
2178 /* Dump the callgraph to file F. */
2179
2180 void
2181 cgraph_node::dump_cgraph (FILE *f)
2182 {
2183 cgraph_node *node;
2184
2185 fprintf (f, "callgraph:\n\n");
2186 FOR_EACH_FUNCTION (node)
2187 node->dump (f);
2188 }
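
/* A minimal usage sketch (illustrative only): passes typically guard dumping
   behind the dump machinery already used elsewhere in this file, e.g.

     if (dump_file && (dump_flags & TDF_DETAILS))
       node->dump (dump_file);

   while dump_cgraph (stderr) or node->debug () are convenient to invoke by
   hand from a debugger session.  */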
2189
2190 /* Return true when the DECL can possibly be inlined. */
2191
2192 bool
2193 cgraph_function_possibly_inlined_p (tree decl)
2194 {
2195 if (!symtab->global_info_ready)
2196 return !DECL_UNINLINABLE (decl);
2197 return DECL_POSSIBLY_INLINED (decl);
2198 }
2199
2200 /* The cgraph_node is no longer a nested function; update cgraph accordingly. */
2201 void
2202 cgraph_node::unnest (void)
2203 {
2204 gcc_assert (origin);
2205 cgraph_node **node2 = &origin->nested;
2206
2207 while (*node2 != this)
2208 node2 = &(*node2)->next_nested;
2209 *node2 = next_nested;
2210 origin = NULL;
2211 }
2212
2213 /* Return function availability. See cgraph.h for description of individual
2214 return values. */
2215 enum availability
2216 cgraph_node::get_availability (symtab_node *ref)
2217 {
2218 if (ref)
2219 {
2220 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2221 if (cref)
2222 ref = cref->global.inlined_to;
2223 }
2224 enum availability avail;
2225 if (!analyzed)
2226 avail = AVAIL_NOT_AVAILABLE;
2227 else if (local.local)
2228 avail = AVAIL_LOCAL;
2229 else if (global.inlined_to)
2230 avail = AVAIL_AVAILABLE;
2231 else if (transparent_alias)
2232 ultimate_alias_target (&avail, ref);
2233 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
2234 avail = AVAIL_INTERPOSABLE;
2235 else if (!externally_visible)
2236 avail = AVAIL_AVAILABLE;
2237 /* If this is a reference from symbol itself and there are no aliases, we
2238 may be sure that the symbol was not interposed by something else because
2239 the symbol itself would be unreachable otherwise.
2240
2241 Also comdat groups are always resolved in groups. */
2242 else if ((this == ref && !has_aliases_p ())
2243 || (ref && get_comdat_group ()
2244 && get_comdat_group () == ref->get_comdat_group ()))
2245 avail = AVAIL_AVAILABLE;
2246 /* Inline functions are safe to be analyzed even if their symbol can
2247 be overwritten at runtime. It is not meaningful to enforce any sane
2248 behavior on replacing inline function by different body. */
2249 else if (DECL_DECLARED_INLINE_P (decl))
2250 avail = AVAIL_AVAILABLE;
2251
2252 /* If the function can be overwritten, return AVAIL_INTERPOSABLE. Take
2253 care at least of notable extensions such as the COMDAT functions
2254 used to share template instantiations in C++ (this is symmetric
2255 to the code in cp_cannot_inline_tree_fn and probably should be shared,
2256 with the inlinability hooks completely eliminated). */
2257
2258 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2259 avail = AVAIL_INTERPOSABLE;
2260 else avail = AVAIL_AVAILABLE;
2261
2262 return avail;
2263 }
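
/* A minimal sketch of how availability is typically consumed (illustrative
   only; the exact guard depends on the pass):

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       {
         /* The body of TARGET can be analyzed and the results used at this
            reference, because no interposition can change it.  */
       }

   Results obtained from an AVAIL_INTERPOSABLE body generally must not be
   used to draw semantic conclusions about the definition that will actually
   run.  */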
2264
2265 /* Worker for cgraph_node_can_be_local_p. */
2266 static bool
2267 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2268 {
2269 return !(!node->force_output
2270 && ((DECL_COMDAT (node->decl)
2271 && !node->forced_by_abi
2272 && !node->used_from_object_file_p ()
2273 && !node->same_comdat_group)
2274 || !node->externally_visible));
2275 }
2276
2277 /* Return true if cgraph_node can be made local for API change.
2278 Extern inline functions and C++ COMDAT functions can be made local
2279 at the expense of possible code size growth if function is used in multiple
2280 compilation units. */
2281 bool
2282 cgraph_node::can_be_local_p (void)
2283 {
2284 return (!address_taken
2285 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2286 NULL, true));
2287 }
2288
2289 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2290 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2291 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2292 skipped. */
2293 bool
2294 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2295 (cgraph_node *, void *),
2296 void *data,
2297 bool include_overwritable,
2298 bool exclude_virtual_thunks)
2299 {
2300 cgraph_edge *e;
2301 ipa_ref *ref;
2302 enum availability avail = AVAIL_AVAILABLE;
2303
2304 if (include_overwritable
2305 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2306 {
2307 if (callback (this, data))
2308 return true;
2309 }
2310 FOR_EACH_ALIAS (this, ref)
2311 {
2312 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2313 if (include_overwritable
2314 || alias->get_availability () > AVAIL_INTERPOSABLE)
2315 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2316 include_overwritable,
2317 exclude_virtual_thunks))
2318 return true;
2319 }
2320 if (avail <= AVAIL_INTERPOSABLE)
2321 return false;
2322 for (e = callers; e; e = e->next_caller)
2323 if (e->caller->thunk.thunk_p
2324 && (include_overwritable
2325 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2326 && !(exclude_virtual_thunks
2327 && e->caller->thunk.virtual_offset_p))
2328 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2329 include_overwritable,
2330 exclude_virtual_thunks))
2331 return true;
2332
2333 return false;
2334 }
2335
2336 /* Worker to bring NODE local. */
2337
2338 bool
2339 cgraph_node::make_local (cgraph_node *node, void *)
2340 {
2341 gcc_checking_assert (node->can_be_local_p ());
2342 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2343 {
2344 node->make_decl_local ();
2345 node->set_section (NULL);
2346 node->set_comdat_group (NULL);
2347 node->externally_visible = false;
2348 node->forced_by_abi = false;
2349 node->local.local = true;
2350 node->set_section (NULL);
2351 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2352 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2353 && !flag_incremental_link);
2354 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2355 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2356 }
2357 return false;
2358 }
2359
2360 /* Bring cgraph node local. */
2361
2362 void
2363 cgraph_node::make_local (void)
2364 {
2365 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2366 }
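
/* A minimal usage sketch (illustrative only): a privatization pass would
   first check the predicate and only then localize, e.g.

     if (node->can_be_local_p () && !node->used_from_object_file_p ())
       node->make_local ();

   make_local itself walks the associated aliases and thunks via
   call_for_symbol_thunks_and_aliases, so the caller does not need to.  */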
2367
2368 /* Worker to set nothrow flag. */
2369
2370 static void
2371 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2372 bool *changed)
2373 {
2374 cgraph_edge *e;
2375
2376 if (nothrow && !TREE_NOTHROW (node->decl))
2377 {
2378 /* With non-call exceptions we can't say for sure that the other function
2379 body was not optimized to still throw. */
2380 if (!non_call || node->binds_to_current_def_p ())
2381 {
2382 TREE_NOTHROW (node->decl) = true;
2383 *changed = true;
2384 for (e = node->callers; e; e = e->next_caller)
2385 e->can_throw_external = false;
2386 }
2387 }
2388 else if (!nothrow && TREE_NOTHROW (node->decl))
2389 {
2390 TREE_NOTHROW (node->decl) = false;
2391 *changed = true;
2392 }
2393 ipa_ref *ref;
2394 FOR_EACH_ALIAS (node, ref)
2395 {
2396 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2397 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2398 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2399 }
2400 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2401 if (e->caller->thunk.thunk_p
2402 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2403 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2404 }
2405
2406 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node,
2407 if any, to NOTHROW. Return true if any change was done. */
2408
2409 bool
2410 cgraph_node::set_nothrow_flag (bool nothrow)
2411 {
2412 bool changed = false;
2413 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2414
2415 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2416 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2417 else
2418 {
2419 ipa_ref *ref;
2420
2421 FOR_EACH_ALIAS (this, ref)
2422 {
2423 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2424 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2425 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2426 }
2427 }
2428 return changed;
2429 }
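
/* A minimal usage sketch (illustrative only; similar in spirit to IPA
   nothrow discovery):

     if (!TREE_NOTHROW (node->decl) && body_proved_to_not_throw)
       {
         bool changed = node->set_nothrow_flag (true);
         if (changed && dump_file)
           fprintf (dump_file, "Function found to be nothrow: %s\n",
                    node->name ());
       }

   where BODY_PROVED_TO_NOT_THROW stands for whatever analysis the caller
   performed; it is not a real variable in this file.  */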
2430
2431 /* Worker to set_const_flag. */
2432
2433 static void
2434 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2435 bool *changed)
2436 {
2437 /* Static constructors and destructors without a side effect can be
2438 optimized out. */
2439 if (set_const && !looping)
2440 {
2441 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2442 {
2443 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2444 *changed = true;
2445 }
2446 if (DECL_STATIC_DESTRUCTOR (node->decl))
2447 {
2448 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2449 *changed = true;
2450 }
2451 }
2452 if (!set_const)
2453 {
2454 if (TREE_READONLY (node->decl))
2455 {
2456 TREE_READONLY (node->decl) = 0;
2457 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2458 *changed = true;
2459 }
2460 }
2461 else
2462 {
2463 /* Consider function:
2464
2465 bool a(int *p)
2466 {
2467 return *p==*p;
2468 }
2469
2470 During early optimization we will turn this into:
2471
2472 bool a(int *p)
2473 {
2474 return true;
2475 }
2476
2477 Now this function will be detected as CONST; however, when interposed
2478 it may end up being just PURE. We must always assume the worst
2479 scenario here. */
2480 if (TREE_READONLY (node->decl))
2481 {
2482 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2483 {
2484 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2485 *changed = true;
2486 }
2487 }
2488 else if (node->binds_to_current_def_p ())
2489 {
2490 TREE_READONLY (node->decl) = true;
2491 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2492 DECL_PURE_P (node->decl) = false;
2493 *changed = true;
2494 }
2495 else
2496 {
2497 if (dump_file && (dump_flags & TDF_DETAILS))
2498 fprintf (dump_file, "Dropping state to PURE because function does "
2499 "not bind to current def.\n");
2500 if (!DECL_PURE_P (node->decl))
2501 {
2502 DECL_PURE_P (node->decl) = true;
2503 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2504 *changed = true;
2505 }
2506 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2507 {
2508 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2509 *changed = true;
2510 }
2511 }
2512 }
2513
2514 ipa_ref *ref;
2515 FOR_EACH_ALIAS (node, ref)
2516 {
2517 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2518 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2519 set_const_flag_1 (alias, set_const, looping, changed);
2520 }
2521 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2522 if (e->caller->thunk.thunk_p
2523 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2524 {
2525 /* Virtual thunks access virtual offset in the vtable, so they can
2526 only be pure, never const. */
2527 if (set_const
2528 && (e->caller->thunk.virtual_offset_p
2529 || !node->binds_to_current_def_p (e->caller)))
2530 *changed |= e->caller->set_pure_flag (true, looping);
2531 else
2532 set_const_flag_1 (e->caller, set_const, looping, changed);
2533 }
2534 }
2535
2536 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2537 If SET_CONST is false, clear the flag.
2538 
2539 When setting the flag be careful about possible interposition: do not
2540 set the flag for functions that can be interposed, and only set the
2541 pure flag for functions that may bind to a different definition.
2542
2543 Return true if any change was done. */
2544
2545 bool
2546 cgraph_node::set_const_flag (bool set_const, bool looping)
2547 {
2548 bool changed = false;
2549 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2550 set_const_flag_1 (this, set_const, looping, &changed);
2551 else
2552 {
2553 ipa_ref *ref;
2554
2555 FOR_EACH_ALIAS (this, ref)
2556 {
2557 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2558 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2559 set_const_flag_1 (alias, set_const, looping, &changed);
2560 }
2561 }
2562 return changed;
2563 }
2564
2565 /* Info used by set_pure_flag_1. */
2566
2567 struct set_pure_flag_info
2568 {
2569 bool pure;
2570 bool looping;
2571 bool changed;
2572 };
2573
2574 /* Worker to set_pure_flag. */
2575
2576 static bool
2577 set_pure_flag_1 (cgraph_node *node, void *data)
2578 {
2579 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2580 /* Static constructors and destructors without a side effect can be
2581 optimized out. */
2582 if (info->pure && !info->looping)
2583 {
2584 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2585 {
2586 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2587 info->changed = true;
2588 }
2589 if (DECL_STATIC_DESTRUCTOR (node->decl))
2590 {
2591 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2592 info->changed = true;
2593 }
2594 }
2595 if (info->pure)
2596 {
2597 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2598 {
2599 DECL_PURE_P (node->decl) = true;
2600 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2601 info->changed = true;
2602 }
2603 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2604 && !info->looping)
2605 {
2606 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2607 info->changed = true;
2608 }
2609 }
2610 else
2611 {
2612 if (DECL_PURE_P (node->decl))
2613 {
2614 DECL_PURE_P (node->decl) = false;
2615 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2616 info->changed = true;
2617 }
2618 }
2619 return false;
2620 }
2621
2622 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2623 if any to PURE.
2624
2625 When setting the flag, be careful about possible interposition.
2626 Return true if any change was done. */
2627
2628 bool
2629 cgraph_node::set_pure_flag (bool pure, bool looping)
2630 {
2631 struct set_pure_flag_info info = {pure, looping, false};
2632 if (!pure)
2633 looping = false;
2634 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2635 return info.changed;
2636 }
2637
2638 /* Return true when the cgraph_node cannot return or throw and thus
2639 it is safe to ignore its side effects for IPA analysis. */
2640
2641 bool
2642 cgraph_node::cannot_return_p (void)
2643 {
2644 int flags = flags_from_decl_or_type (decl);
2645 if (!opt_for_fn (decl, flag_exceptions))
2646 return (flags & ECF_NORETURN) != 0;
2647 else
2648 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2649 == (ECF_NORETURN | ECF_NOTHROW));
2650 }
2651
2652 /* Return true when a call of the edge cannot lead to return from the
2653 caller and thus it is safe to ignore its side effects for IPA analysis
2654 when computing side effects of the caller.
2655 FIXME: We could actually mark all edges that have no reaching
2656 path to the exit block or throw to get better results. */
2657 bool
2658 cgraph_edge::cannot_lead_to_return_p (void)
2659 {
2660 if (caller->cannot_return_p ())
2661 return true;
2662 if (indirect_unknown_callee)
2663 {
2664 int flags = indirect_info->ecf_flags;
2665 if (!opt_for_fn (caller->decl, flag_exceptions))
2666 return (flags & ECF_NORETURN) != 0;
2667 else
2668 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2669 == (ECF_NORETURN | ECF_NOTHROW));
2670 }
2671 else
2672 return callee->cannot_return_p ();
2673 }
2674
2675 /* Return true if the call can be hot. */
2676
2677 bool
2678 cgraph_edge::maybe_hot_p (void)
2679 {
2680 /* TODO: Export profile_status from cfun->cfg to cgraph_node. */
2681 if (profile_info
2682 && opt_for_fn (caller->decl, flag_branch_probabilities)
2683 && !maybe_hot_count_p (NULL, count))
2684 return false;
2685 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2686 || (callee
2687 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2688 return false;
2689 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2690 && (callee
2691 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2692 return false;
2693 if (opt_for_fn (caller->decl, optimize_size))
2694 return false;
2695 if (caller->frequency == NODE_FREQUENCY_HOT)
2696 return true;
2697 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2698 && frequency < CGRAPH_FREQ_BASE * 3 / 2)
2699 return false;
2700 if (opt_for_fn (caller->decl, flag_guess_branch_prob))
2701 {
2702 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2703 || frequency <= (CGRAPH_FREQ_BASE
2704 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
2705 return false;
2706 }
2707 return true;
2708 }
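
/* A worked example of the guessed-profile test above (illustrative only):
   edge frequencies are scaled so that CGRAPH_FREQ_BASE means "once per call
   of the caller". If HOT_BB_FREQUENCY_FRACTION were, say, 1000, then

     frequency <= CGRAPH_FREQ_BASE / 1000

   i.e. an edge executed at most about once per thousand invocations of its
   caller, would not be considered hot. The actual threshold is whatever
   --param hot-bb-frequency-fraction is in effect.  */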
2709
2710 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2711
2712 static bool
2713 nonremovable_p (cgraph_node *node, void *)
2714 {
2715 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2716 }
2717
2718 /* Return true if whole comdat group can be removed if there are no direct
2719 calls to THIS. */
2720
2721 bool
2722 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2723 {
2724 struct ipa_ref *ref;
2725
2726 /* For local symbols or non-comdat group it is the same as
2727 can_remove_if_no_direct_calls_p. */
2728 if (!externally_visible || !same_comdat_group)
2729 {
2730 if (DECL_EXTERNAL (decl))
2731 return true;
2732 if (address_taken)
2733 return false;
2734 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2735 }
2736
2737 if (will_inline && address_taken)
2738 return false;
2739
2740 /* Otherwise check if we can remove the symbol itself and then verify
2741 that the only uses of the comdat group are direct calls to THIS
2742 or its aliases. */
2743 if (!can_remove_if_no_direct_calls_and_refs_p ())
2744 return false;
2745
2746 /* Check that all refs come from within the comdat group. */
2747 for (int i = 0; iterate_referring (i, ref); i++)
2748 if (ref->referring->get_comdat_group () != get_comdat_group ())
2749 return false;
2750
2751 struct cgraph_node *target = ultimate_alias_target ();
2752 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2753 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2754 {
2755 if (!externally_visible)
2756 continue;
2757 if (!next->alias
2758 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2759 return false;
2760
2761 /* If we see a different symbol than THIS, be sure to check calls. */
2762 if (next->ultimate_alias_target () != target)
2763 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2764 if (e->caller->get_comdat_group () != get_comdat_group ()
2765 || will_inline)
2766 return false;
2767
2768 /* If function is not being inlined, we care only about
2769 references outside of the comdat group. */
2770 if (!will_inline)
2771 for (int i = 0; next->iterate_referring (i, ref); i++)
2772 if (ref->referring->get_comdat_group () != get_comdat_group ())
2773 return false;
2774 }
2775 return true;
2776 }
2777
2778 /* Return true when the function can be expected to be removed from the
2779 program when direct calls in this compilation unit are removed.
2780 
2781 As a special case, COMDAT functions are
2782 cgraph_can_remove_if_no_direct_calls_p while they are not
2783 cgraph_only_called_directly_p (it is possible they are called from
2784 another unit).
2785 
2786 This function behaves as cgraph_only_called_directly_p because eliminating
2787 all uses of a COMDAT function does not necessarily make it disappear from
2788 the program unless we are compiling the whole program or doing LTO. In
2789 that case we know we win since dynamic linking will not really discard the
2790 linkonce section. */
2791
2792 bool
2793 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2794 (bool will_inline)
2795 {
2796 gcc_assert (!global.inlined_to);
2797 if (DECL_EXTERNAL (decl))
2798 return true;
2799
2800 if (!in_lto_p && !flag_whole_program)
2801 {
2802 /* If the symbol is in comdat group, we need to verify that whole comdat
2803 group becomes unreachable. Technically we could skip references from
2804 within the group, too. */
2805 if (!only_called_directly_p ())
2806 return false;
2807 if (same_comdat_group && externally_visible)
2808 {
2809 struct cgraph_node *target = ultimate_alias_target ();
2810
2811 if (will_inline && address_taken)
2812 return true;
2813 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2814 next != this;
2815 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2816 {
2817 if (!externally_visible)
2818 continue;
2819 if (!next->alias
2820 && !next->only_called_directly_p ())
2821 return false;
2822
2823 /* If we see a different symbol than THIS,
2824 be sure to check calls. */
2825 if (next->ultimate_alias_target () != target)
2826 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2827 if (e->caller->get_comdat_group () != get_comdat_group ()
2828 || will_inline)
2829 return false;
2830 }
2831 }
2832 return true;
2833 }
2834 else
2835 return can_remove_if_no_direct_calls_p (will_inline);
2836 }
2837
2838
2839 /* Worker for cgraph_only_called_directly_p. */
2840
2841 static bool
2842 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2843 {
2844 return !node->only_called_directly_or_aliased_p ();
2845 }
2846
2847 /* Return true when the function and all its aliases are only called
2848 directly,
2849 i.e. they are not externally visible, their address is not taken and
2850 they are not used in any other non-standard way. */
2851
2852 bool
2853 cgraph_node::only_called_directly_p (void)
2854 {
2855 gcc_assert (ultimate_alias_target () == this);
2856 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2857 NULL, true);
2858 }
2859
2860
2861 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2862
2863 static bool
2864 collect_callers_of_node_1 (cgraph_node *node, void *data)
2865 {
2866 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2867 cgraph_edge *cs;
2868 enum availability avail;
2869 node->ultimate_alias_target (&avail);
2870
2871 if (avail > AVAIL_INTERPOSABLE)
2872 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2873 if (!cs->indirect_inlining_edge
2874 && !cs->caller->thunk.thunk_p)
2875 redirect_callers->safe_push (cs);
2876 return false;
2877 }
2878
2879 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2880 cgraph_node (i.e. are not overwritable). */
2881
2882 vec<cgraph_edge *>
2883 cgraph_node::collect_callers (void)
2884 {
2885 vec<cgraph_edge *> redirect_callers = vNULL;
2886 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2887 &redirect_callers, false);
2888 return redirect_callers;
2889 }
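
/* A minimal usage sketch (illustrative only, assuming NEW_NODE is a freshly
   created clone and that cgraph_edge::redirect_callee from cgraph.h is used
   to retarget each edge):

     vec<cgraph_edge *> callers = node->collect_callers ();
     for (unsigned i = 0; i < callers.length (); i++)
       callers[i]->redirect_callee (new_node);
     callers.release ();

   Only callers whose resolution is better than AVAIL_INTERPOSABLE, and that
   are neither thunks nor indirect-inlining edges, are collected, so the
   redirection is safe.  */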
2890
2891 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
2892
2893 static bool
2894 clone_of_p (cgraph_node *node, cgraph_node *node2)
2895 {
2896 bool skipped_thunk = false;
2897 node = node->ultimate_alias_target ();
2898 node2 = node2->ultimate_alias_target ();
2899
2900 /* There are no virtual clones of thunks, so check former_clone_of or
2901 whether we might have skipped thunks because their this-adjustments are
2902 no longer necessary. */
2903 while (node->thunk.thunk_p)
2904 {
2905 if (node2->former_clone_of == node->decl)
2906 return true;
2907 if (!node->thunk.this_adjusting)
2908 return false;
2909 node = node->callees->callee->ultimate_alias_target ();
2910 skipped_thunk = true;
2911 }
2912
2913 if (skipped_thunk)
2914 {
2915 if (!node2->clone.args_to_skip
2916 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
2917 return false;
2918 if (node2->former_clone_of == node->decl)
2919 return true;
2920 else if (!node2->clone_of)
2921 return false;
2922 }
2923
2924 while (node != node2 && node2)
2925 node2 = node2->clone_of;
2926 return node2 != NULL;
2927 }
2928
2929 /* Verify edge count and frequency. */
2930
2931 bool
2932 cgraph_edge::verify_count_and_frequency ()
2933 {
2934 bool error_found = false;
2935 if (count < 0)
2936 {
2937 error ("caller edge count is negative");
2938 error_found = true;
2939 }
2940 if (frequency < 0)
2941 {
2942 error ("caller edge frequency is negative");
2943 error_found = true;
2944 }
2945 if (frequency > CGRAPH_FREQ_MAX)
2946 {
2947 error ("caller edge frequency is too large");
2948 error_found = true;
2949 }
2950 return error_found;
2951 }
2952
2953 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2954 static void
2955 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
2956 {
2957 bool fndecl_was_null = false;
2958 /* debug_gimple_stmt needs correct cfun */
2959 if (cfun != this_cfun)
2960 set_cfun (this_cfun);
2961 /* ...and an actual current_function_decl */
2962 if (!current_function_decl)
2963 {
2964 current_function_decl = this_cfun->decl;
2965 fndecl_was_null = true;
2966 }
2967 debug_gimple_stmt (stmt);
2968 if (fndecl_was_null)
2969 current_function_decl = NULL;
2970 }
2971
2972 /* Verify that call graph edge corresponds to DECL from the associated
2973 statement. Return true if the verification should fail. */
2974
2975 bool
2976 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
2977 {
2978 cgraph_node *node;
2979
2980 if (!decl || callee->global.inlined_to)
2981 return false;
2982 if (symtab->state == LTO_STREAMING)
2983 return false;
2984 node = cgraph_node::get (decl);
2985
2986 /* We do not know if a node from a different partition is an alias or what it
2987 aliases and therefore cannot do the former_clone_of check reliably. When
2988 body_removed is set, we have lost all information about what was alias or
2989 thunk of and also cannot proceed. */
2990 if (!node
2991 || node->body_removed
2992 || node->in_other_partition
2993 || callee->icf_merged
2994 || callee->in_other_partition)
2995 return false;
2996
2997 node = node->ultimate_alias_target ();
2998
2999 /* Optimizers can redirect unreachable calls or calls triggering undefined
3000 behavior to builtin_unreachable. */
3001 if (DECL_BUILT_IN_CLASS (callee->decl) == BUILT_IN_NORMAL
3002 && DECL_FUNCTION_CODE (callee->decl) == BUILT_IN_UNREACHABLE)
3003 return false;
3004
3005 if (callee->former_clone_of != node->decl
3006 && (node != callee->ultimate_alias_target ())
3007 && !clone_of_p (node, callee))
3008 return true;
3009 else
3010 return false;
3011 }
3012
3013 /* Verify the consistency of the given cgraph node. */
3014 DEBUG_FUNCTION void
3015 cgraph_node::verify_node (void)
3016 {
3017 cgraph_edge *e;
3018 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3019 basic_block this_block;
3020 gimple_stmt_iterator gsi;
3021 bool error_found = false;
3022
3023 if (seen_error ())
3024 return;
3025
3026 timevar_push (TV_CGRAPH_VERIFY);
3027 error_found |= verify_base ();
3028 for (e = callees; e; e = e->next_callee)
3029 if (e->aux)
3030 {
3031 error ("aux field set for edge %s->%s",
3032 identifier_to_locale (e->caller->name ()),
3033 identifier_to_locale (e->callee->name ()));
3034 error_found = true;
3035 }
3036 if (count < 0)
3037 {
3038 error ("execution count is negative");
3039 error_found = true;
3040 }
3041 if (global.inlined_to && same_comdat_group)
3042 {
3043 error ("inline clone in same comdat group list");
3044 error_found = true;
3045 }
3046 if (!definition && !in_other_partition && local.local)
3047 {
3048 error ("local symbols must be defined");
3049 error_found = true;
3050 }
3051 if (global.inlined_to && externally_visible)
3052 {
3053 error ("externally visible inline clone");
3054 error_found = true;
3055 }
3056 if (global.inlined_to && address_taken)
3057 {
3058 error ("inline clone with address taken");
3059 error_found = true;
3060 }
3061 if (global.inlined_to && force_output)
3062 {
3063 error ("inline clone is forced to output");
3064 error_found = true;
3065 }
3066 for (e = indirect_calls; e; e = e->next_callee)
3067 {
3068 if (e->aux)
3069 {
3070 error ("aux field set for indirect edge from %s",
3071 identifier_to_locale (e->caller->name ()));
3072 error_found = true;
3073 }
3074 if (!e->indirect_unknown_callee
3075 || !e->indirect_info)
3076 {
3077 error ("An indirect edge from %s is not marked as indirect or has no "
3078 "associated indirect_info, the corresponding statement is: ",
3079 identifier_to_locale (e->caller->name ()));
3080 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3081 error_found = true;
3082 }
3083 }
3084 bool check_comdat = comdat_local_p ();
3085 for (e = callers; e; e = e->next_caller)
3086 {
3087 if (e->verify_count_and_frequency ())
3088 error_found = true;
3089 if (check_comdat
3090 && !in_same_comdat_group_p (e->caller))
3091 {
3092 error ("comdat-local function called by %s outside its comdat",
3093 identifier_to_locale (e->caller->name ()));
3094 error_found = true;
3095 }
3096 if (!e->inline_failed)
3097 {
3098 if (global.inlined_to
3099 != (e->caller->global.inlined_to
3100 ? e->caller->global.inlined_to : e->caller))
3101 {
3102 error ("inlined_to pointer is wrong");
3103 error_found = true;
3104 }
3105 if (callers->next_caller)
3106 {
3107 error ("multiple inline callers");
3108 error_found = true;
3109 }
3110 }
3111 else
3112 if (global.inlined_to)
3113 {
3114 error ("inlined_to pointer set for noninline callers");
3115 error_found = true;
3116 }
3117 }
3118 for (e = callees; e; e = e->next_callee)
3119 {
3120 if (e->verify_count_and_frequency ())
3121 error_found = true;
3122 if (gimple_has_body_p (e->caller->decl)
3123 && !e->caller->global.inlined_to
3124 && !e->speculative
3125 /* Optimized out calls are redirected to __builtin_unreachable. */
3126 && (e->frequency
3127 || e->callee->decl
3128 != builtin_decl_implicit (BUILT_IN_UNREACHABLE))
3129 && (e->frequency
3130 != compute_call_stmt_bb_frequency (e->caller->decl,
3131 gimple_bb (e->call_stmt))))
3132 {
3133 error ("caller edge frequency %i does not match BB frequency %i",
3134 e->frequency,
3135 compute_call_stmt_bb_frequency (e->caller->decl,
3136 gimple_bb (e->call_stmt)));
3137 error_found = true;
3138 }
3139 }
3140 for (e = indirect_calls; e; e = e->next_callee)
3141 {
3142 if (e->verify_count_and_frequency ())
3143 error_found = true;
3144 if (gimple_has_body_p (e->caller->decl)
3145 && !e->caller->global.inlined_to
3146 && !e->speculative
3147 && (e->frequency
3148 != compute_call_stmt_bb_frequency (e->caller->decl,
3149 gimple_bb (e->call_stmt))))
3150 {
3151 error ("indirect call frequency %i does not match BB frequency %i",
3152 e->frequency,
3153 compute_call_stmt_bb_frequency (e->caller->decl,
3154 gimple_bb (e->call_stmt)));
3155 error_found = true;
3156 }
3157 }
3158 if (!callers && global.inlined_to)
3159 {
3160 error ("inlined_to pointer is set but no predecessors found");
3161 error_found = true;
3162 }
3163 if (global.inlined_to == this)
3164 {
3165 error ("inlined_to pointer refers to itself");
3166 error_found = true;
3167 }
3168
3169 if (clone_of)
3170 {
3171 cgraph_node *n;
3172 for (n = clone_of->clones; n; n = n->next_sibling_clone)
3173 if (n == this)
3174 break;
3175 if (!n)
3176 {
3177 error ("cgraph_node has wrong clone_of");
3178 error_found = true;
3179 }
3180 }
3181 if (clones)
3182 {
3183 cgraph_node *n;
3184 for (n = clones; n; n = n->next_sibling_clone)
3185 if (n->clone_of != this)
3186 break;
3187 if (n)
3188 {
3189 error ("cgraph_node has wrong clone list");
3190 error_found = true;
3191 }
3192 }
3193 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3194 {
3195 error ("cgraph_node is in clone list but it is not clone");
3196 error_found = true;
3197 }
3198 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3199 {
3200 error ("cgraph_node has wrong prev_clone pointer");
3201 error_found = true;
3202 }
3203 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3204 {
3205 error ("double linked list of clones corrupted");
3206 error_found = true;
3207 }
3208
3209 if (analyzed && alias)
3210 {
3211 bool ref_found = false;
3212 int i;
3213 ipa_ref *ref = NULL;
3214
3215 if (callees)
3216 {
3217 error ("Alias has call edges");
3218 error_found = true;
3219 }
3220 for (i = 0; iterate_reference (i, ref); i++)
3221 if (ref->use == IPA_REF_CHKP)
3222 ;
3223 else if (ref->use != IPA_REF_ALIAS)
3224 {
3225 error ("Alias has non-alias reference");
3226 error_found = true;
3227 }
3228 else if (ref_found)
3229 {
3230 error ("Alias has more than one alias reference");
3231 error_found = true;
3232 }
3233 else
3234 ref_found = true;
3235 if (!ref_found)
3236 {
3237 error ("Analyzed alias has no reference");
3238 error_found = true;
3239 }
3240 }
3241
3242 /* Check instrumented version reference. */
3243 if (instrumented_version
3244 && instrumented_version->instrumented_version != this)
3245 {
3246 error ("Instrumentation clone does not reference original node");
3247 error_found = true;
3248 }
3249
3250 /* Cannot have orig_decl for non-instrumented nodes. */
3251 if (!instrumentation_clone && orig_decl)
3252 {
3253 error ("Not instrumented node has non-NULL original declaration");
3254 error_found = true;
3255 }
3256
3257 /* If the original non-instrumented node still exists then we may check
3258 that the original declaration is set properly. */
3259 if (instrumented_version
3260 && orig_decl
3261 && orig_decl != instrumented_version->decl)
3262 {
3263 error ("Instrumented node has wrong original declaration");
3264 error_found = true;
3265 }
3266
3267 /* Check all nodes have chkp reference to their instrumented versions. */
3268 if (analyzed
3269 && instrumented_version
3270 && !instrumentation_clone)
3271 {
3272 bool ref_found = false;
3273 int i;
3274 struct ipa_ref *ref;
3275
3276 for (i = 0; iterate_reference (i, ref); i++)
3277 if (ref->use == IPA_REF_CHKP)
3278 {
3279 if (ref_found)
3280 {
3281 error ("Node has more than one chkp reference");
3282 error_found = true;
3283 }
3284 if (ref->referred != instrumented_version)
3285 {
3286 error ("Wrong node is referenced with chkp reference");
3287 error_found = true;
3288 }
3289 ref_found = true;
3290 }
3291
3292 if (!ref_found)
3293 {
3294 error ("Analyzed node has no reference to instrumented version");
3295 error_found = true;
3296 }
3297 }
3298
3299 if (instrumentation_clone
3300 && DECL_BUILT_IN_CLASS (decl) == NOT_BUILT_IN)
3301 {
3302 tree name = DECL_ASSEMBLER_NAME (decl);
3303 tree orig_name = DECL_ASSEMBLER_NAME (orig_decl);
3304
3305 if (!IDENTIFIER_TRANSPARENT_ALIAS (name)
3306 || TREE_CHAIN (name) != orig_name)
3307 {
3308 error ("Alias chain for instrumented node is broken");
3309 error_found = true;
3310 }
3311 }
3312
3313 if (analyzed && thunk.thunk_p)
3314 {
3315 if (!callees)
3316 {
3317 error ("No edge out of thunk node");
3318 error_found = true;
3319 }
3320 else if (callees->next_callee)
3321 {
3322 error ("More than one edge out of thunk node");
3323 error_found = true;
3324 }
3325 if (gimple_has_body_p (decl) && !global.inlined_to)
3326 {
3327 error ("Thunk is not supposed to have body");
3328 error_found = true;
3329 }
3330 if (thunk.add_pointer_bounds_args
3331 && !instrumented_version->semantically_equivalent_p (callees->callee))
3332 {
3333 error ("Instrumentation thunk has wrong edge callee");
3334 error_found = true;
3335 }
3336 }
3337 else if (analyzed && gimple_has_body_p (decl)
3338 && !TREE_ASM_WRITTEN (decl)
3339 && (!DECL_EXTERNAL (decl) || global.inlined_to)
3340 && !flag_wpa)
3341 {
3342 if (this_cfun->cfg)
3343 {
3344 hash_set<gimple *> stmts;
3345 int i;
3346 ipa_ref *ref = NULL;
3347
3348 /* Reach the trees by walking over the CFG, and note the
3349 enclosing basic-blocks in the call edges. */
3350 FOR_EACH_BB_FN (this_block, this_cfun)
3351 {
3352 for (gsi = gsi_start_phis (this_block);
3353 !gsi_end_p (gsi); gsi_next (&gsi))
3354 stmts.add (gsi_stmt (gsi));
3355 for (gsi = gsi_start_bb (this_block);
3356 !gsi_end_p (gsi);
3357 gsi_next (&gsi))
3358 {
3359 gimple *stmt = gsi_stmt (gsi);
3360 stmts.add (stmt);
3361 if (is_gimple_call (stmt))
3362 {
3363 cgraph_edge *e = get_edge (stmt);
3364 tree decl = gimple_call_fndecl (stmt);
3365 if (e)
3366 {
3367 if (e->aux)
3368 {
3369 error ("shared call_stmt:");
3370 cgraph_debug_gimple_stmt (this_cfun, stmt);
3371 error_found = true;
3372 }
3373 if (!e->indirect_unknown_callee)
3374 {
3375 if (e->verify_corresponds_to_fndecl (decl))
3376 {
3377 error ("edge points to wrong declaration:");
3378 debug_tree (e->callee->decl);
3379 fprintf (stderr," Instead of:");
3380 debug_tree (decl);
3381 error_found = true;
3382 }
3383 }
3384 else if (decl)
3385 {
3386 error ("an indirect edge with unknown callee "
3387 "corresponding to a call_stmt with "
3388 "a known declaration:");
3389 error_found = true;
3390 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3391 }
3392 e->aux = (void *)1;
3393 }
3394 else if (decl)
3395 {
3396 error ("missing callgraph edge for call stmt:");
3397 cgraph_debug_gimple_stmt (this_cfun, stmt);
3398 error_found = true;
3399 }
3400 }
3401 }
3402 }
3403 for (i = 0; iterate_reference (i, ref); i++)
3404 if (ref->stmt && !stmts.contains (ref->stmt))
3405 {
3406 error ("reference to dead statement");
3407 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3408 error_found = true;
3409 }
3410 }
3411 else
3412 /* No CFG available?! */
3413 gcc_unreachable ();
3414
3415 for (e = callees; e; e = e->next_callee)
3416 {
3417 if (!e->aux)
3418 {
3419 error ("edge %s->%s has no corresponding call_stmt",
3420 identifier_to_locale (e->caller->name ()),
3421 identifier_to_locale (e->callee->name ()));
3422 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3423 error_found = true;
3424 }
3425 e->aux = 0;
3426 }
3427 for (e = indirect_calls; e; e = e->next_callee)
3428 {
3429 if (!e->aux && !e->speculative)
3430 {
3431 error ("an indirect edge from %s has no corresponding call_stmt",
3432 identifier_to_locale (e->caller->name ()));
3433 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3434 error_found = true;
3435 }
3436 e->aux = 0;
3437 }
3438 }
3439 if (error_found)
3440 {
3441 dump (stderr);
3442 internal_error ("verify_cgraph_node failed");
3443 }
3444 timevar_pop (TV_CGRAPH_VERIFY);
3445 }
3446
3447 /* Verify whole cgraph structure. */
3448 DEBUG_FUNCTION void
3449 cgraph_node::verify_cgraph_nodes (void)
3450 {
3451 cgraph_node *node;
3452
3453 if (seen_error ())
3454 return;
3455
3456 FOR_EACH_FUNCTION (node)
3457 node->verify ();
3458 }
3459
3460 /* Walk the alias chain to return the function the cgraph_node is an
3461 alias of. Walk through thunks, too.
3462 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3463 When REF is non-NULL, assume that reference happens in symbol REF
3464 when determining the availability. */
3465
3466 cgraph_node *
3467 cgraph_node::function_symbol (enum availability *availability,
3468 struct symtab_node *ref)
3469 {
3470 cgraph_node *node = ultimate_alias_target (availability, ref);
3471
3472 while (node->thunk.thunk_p)
3473 {
3474 ref = node;
3475 node = node->callees->callee;
3476 if (availability)
3477 {
3478 enum availability a;
3479 a = node->get_availability (ref);
3480 if (a < *availability)
3481 *availability = a;
3482 }
3483 node = node->ultimate_alias_target (availability, ref);
3484 }
3485 return node;
3486 }
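
/* A minimal usage sketch (illustrative only): callers that need the actual
   function definition behind an alias-or-thunk chain do something like

     enum availability avail;
     cgraph_node *fn = node->function_symbol (&avail);
     if (avail >= AVAIL_AVAILABLE)
       analyze (fn);   /* ANALYZE is a placeholder for the caller's work.  */

   AVAIL reflects the weakest link of the whole chain, not just the final
   node.  */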
3487
3488 /* Walk the alias chain to return the function the cgraph_node is an
3489 alias of. Walk through non-virtual thunks, too. Thus we return either
3490 a function or a virtual thunk node.
3491 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3492 When REF is non-NULL, assume that reference happens in symbol REF
3493 when determining the availability. */
3494
3495 cgraph_node *
3496 cgraph_node::function_or_virtual_thunk_symbol
3497 (enum availability *availability,
3498 struct symtab_node *ref)
3499 {
3500 cgraph_node *node = ultimate_alias_target (availability, ref);
3501
3502 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3503 {
3504 ref = node;
3505 node = node->callees->callee;
3506 if (availability)
3507 {
3508 enum availability a;
3509 a = node->get_availability (ref);
3510 if (a < *availability)
3511 *availability = a;
3512 }
3513 node = node->ultimate_alias_target (availability, ref);
3514 }
3515 return node;
3516 }
3517
3518 /* When doing LTO, read cgraph_node's body from disk if it is not already
3519 present. */
3520
3521 bool
3522 cgraph_node::get_untransformed_body (void)
3523 {
3524 lto_file_decl_data *file_data;
3525 const char *data, *name;
3526 size_t len;
3527 tree decl = this->decl;
3528
3529 /* Check if the body is already there. Either we have a gimple body or
3530 the function is a thunk, in which case we set DECL_ARGUMENTS. */
3531 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3532 return false;
3533
3534 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3535
3536 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3537
3538 file_data = lto_file_data;
3539 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3540
3541 /* We may have renamed the declaration, e.g., a static function. */
3542 name = lto_get_decl_name_mapping (file_data, name);
3543 struct lto_in_decl_state *decl_state
3544 = lto_get_function_in_decl_state (file_data, decl);
3545
3546 data = lto_get_section_data (file_data, LTO_section_function_body,
3547 name, &len, decl_state->compressed);
3548 if (!data)
3549 fatal_error (input_location, "%s: section %s is missing",
3550 file_data->file_name,
3551 name);
3552
3553 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3554
3555 lto_input_function_body (file_data, this, data);
3556 lto_stats.num_function_bodies++;
3557 lto_free_section_data (file_data, LTO_section_function_body, name,
3558 data, len, decl_state->compressed);
3559 lto_free_function_in_decl_state_for_node (this);
3560 /* Keep lto file data so ipa-inline-analysis knows about cross module
3561 inlining. */
3562
3563 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3564
3565 return true;
3566 }
3567
3568 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3569 if it is not already present. When some IPA transformations are scheduled,
3570 apply them. */
3571
3572 bool
3573 cgraph_node::get_body (void)
3574 {
3575 bool updated;
3576
3577 updated = get_untransformed_body ();
3578
3579 /* Getting transformed body makes no sense for inline clones;
3580 we should never use this on real clones because they are materialized
3581 early.
3582 TODO: Materializing clones here will likely lead to smaller LTRANS
3583 footprint. */
3584 gcc_assert (!global.inlined_to && !clone_of);
3585 if (ipa_transforms_to_apply.exists ())
3586 {
3587 opt_pass *saved_current_pass = current_pass;
3588 FILE *saved_dump_file = dump_file;
3589 const char *saved_dump_file_name = dump_file_name;
3590 int saved_dump_flags = dump_flags;
3591 dump_file_name = NULL;
3592 dump_file = NULL;
3593
3594 push_cfun (DECL_STRUCT_FUNCTION (decl));
3595 execute_all_ipa_transforms ();
3596 cgraph_edge::rebuild_edges ();
3597 free_dominance_info (CDI_DOMINATORS);
3598 free_dominance_info (CDI_POST_DOMINATORS);
3599 pop_cfun ();
3600 updated = true;
3601
3602 current_pass = saved_current_pass;
3603 dump_file = saved_dump_file;
3604 dump_file_name = saved_dump_file_name;
3605 dump_flags = saved_dump_flags;
3606 }
3607 return updated;
3608 }
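
/* A minimal usage sketch (illustrative only): a late pass that must inspect
   the GIMPLE of a node, possibly streamed in from an LTO file, would use

     if (node->get_body ())
       {
         /* The body was read in and/or pending IPA transforms were applied;
            DECL_STRUCT_FUNCTION (node->decl) is now populated.  */
       }

   get_untransformed_body is the variant to use when the raw streamed body is
   wanted without applying the queued transforms.  */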
3609
3610 /* Return the DECL_STRUCT_FUNCTION of the function. */
3611
3612 struct function *
3613 cgraph_node::get_fun (void)
3614 {
3615 cgraph_node *node = this;
3616 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3617
3618 while (!fun && node->clone_of)
3619 {
3620 node = node->clone_of;
3621 fun = DECL_STRUCT_FUNCTION (node->decl);
3622 }
3623
3624 return fun;
3625 }
3626
3627 /* Verify that the types of the arguments of call STMT match those of the
3628 function declaration FNDECL; if ARGS_COUNT_MATCH, the argument count must
3629 match too. If we cannot verify this or there is a mismatch, return false. */
3630
3631 static bool
3632 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3633 {
3634 tree parms, p;
3635 unsigned int i, nargs;
3636
3637 /* Calls to internal functions always match their signature. */
3638 if (gimple_call_internal_p (stmt))
3639 return true;
3640
3641 nargs = gimple_call_num_args (stmt);
3642
3643 /* Get argument types for verification. */
3644 if (fndecl)
3645 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3646 else
3647 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3648
3649 /* Verify if the type of the argument matches that of the function
3650 declaration. If we cannot verify this or there is a mismatch,
3651 return false. */
3652 if (fndecl && DECL_ARGUMENTS (fndecl))
3653 {
3654 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3655 i < nargs;
3656 i++, p = DECL_CHAIN (p))
3657 {
3658 tree arg;
3659 /* We cannot distinguish a varargs function from the case
3660 of excess parameters; still, deferring the inlining decision
3661 to the callee is possible. */
3662 if (!p)
3663 break;
3664 arg = gimple_call_arg (stmt, i);
3665 if (p == error_mark_node
3666 || DECL_ARG_TYPE (p) == error_mark_node
3667 || arg == error_mark_node
3668 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3669 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3670 return false;
3671 }
3672 if (args_count_match && p)
3673 return false;
3674 }
3675 else if (parms)
3676 {
3677 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3678 {
3679 tree arg;
3680 /* If this is a varargs function defer inlining decision
3681 to callee. */
3682 if (!p)
3683 break;
3684 arg = gimple_call_arg (stmt, i);
3685 if (TREE_VALUE (p) == error_mark_node
3686 || arg == error_mark_node
3687 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3688 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3689 && !fold_convertible_p (TREE_VALUE (p), arg)))
3690 return false;
3691 }
3692 }
3693 else
3694 {
3695 if (nargs != 0)
3696 return false;
3697 }
3698 return true;
3699 }
3700
3701 /* Verify if the type of the argument and lhs of CALL_STMT matches
3702 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3703 true, the arg count needs to be the same.
3704 If we cannot verify this or there is a mismatch, return false. */
3705
3706 bool
3707 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3708 bool args_count_match)
3709 {
3710 tree lhs;
3711
3712 if ((DECL_RESULT (callee)
3713 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3714 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3715 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3716 TREE_TYPE (lhs))
3717 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3718 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3719 return false;
3720 return true;
3721 }
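
/* A minimal usage sketch (illustrative only): edge-building and inlining
   code uses this check to detect type-mismatched calls, e.g.

     if (!gimple_check_call_matching_types (e->call_stmt, e->callee->decl,
                                            true))
       {
         /* Treat the call as not inlinable; keep it as an ordinary call.  */
       }

   Passing false for ARGS_COUNT_MATCH tolerates calls that pass fewer
   arguments than FNDECL declares, which matters for calls made through
   mismatched prototypes.  */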
3722
3723 /* Reset all state within cgraph.c so that we can rerun the compiler
3724 within the same process. For use by toplev::finalize. */
3725
3726 void
3727 cgraph_c_finalize (void)
3728 {
3729 symtab = NULL;
3730
3731 x_cgraph_nodes_queue = NULL;
3732
3733 cgraph_fnver_htab = NULL;
3734 version_info_node = NULL;
3735 }
3736
3737 /* A worker for call_for_symbol_and_aliases. */
3738
3739 bool
3740 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3741 void *),
3742 void *data,
3743 bool include_overwritable)
3744 {
3745 ipa_ref *ref;
3746 FOR_EACH_ALIAS (this, ref)
3747 {
3748 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3749 if (include_overwritable
3750 || alias->get_availability () > AVAIL_INTERPOSABLE)
3751 if (alias->call_for_symbol_and_aliases (callback, data,
3752 include_overwritable))
3753 return true;
3754 }
3755 return false;
3756 }
3757
3758 /* Return true if NODE has a thunk, i.e. one of its callers is a thunk. */
3759
3760 bool
3761 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3762 {
3763 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3764 if (e->caller->thunk.thunk_p)
3765 return true;
3766 return false;
3767 }
3768
3769 #include "gt-cgraph.h"