re PR c++/70018 (Possible issue around IPO and C++ comdats discovered as pure/const)
[gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2016 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call graph is a data structure designed for inter-procedural optimization.
24 It represents a multi-graph where nodes are functions and edges are call sites. */
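For a concrete picture, a minimal source-level example of that multi-graph (illustrative input, not code from this file):

    /* Two call sites of g in f give two distinct edges f -> g, so the graph
       has two nodes (f and g) and two parallel edges.  */
    int g (void);
    int f (void) { return g () + g (); }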
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "backend.h"
30 #include "target.h"
31 #include "rtl.h"
32 #include "tree.h"
33 #include "gimple.h"
34 #include "predict.h"
35 #include "alloc-pool.h"
36 #include "gimple-ssa.h"
37 #include "cgraph.h"
38 #include "lto-streamer.h"
39 #include "fold-const.h"
40 #include "varasm.h"
41 #include "calls.h"
42 #include "print-tree.h"
43 #include "langhooks.h"
44 #include "intl.h"
45 #include "tree-eh.h"
46 #include "gimple-iterator.h"
47 #include "tree-cfg.h"
48 #include "tree-ssa.h"
49 #include "value-prof.h"
50 #include "ipa-utils.h"
51 #include "symbol-summary.h"
52 #include "ipa-prop.h"
53 #include "ipa-inline.h"
54 #include "cfgloop.h"
55 #include "gimple-pretty-print.h"
56 #include "tree-dfa.h"
57 #include "profile.h"
58 #include "params.h"
59 #include "tree-chkp.h"
60 #include "context.h"
61 #include "gimplify.h"
62
63 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
64 #include "tree-pass.h"
65
66 /* Queue of cgraph nodes scheduled to be lowered. */
67 symtab_node *x_cgraph_nodes_queue;
68 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
69
70 /* Symbol table global context. */
71 symbol_table *symtab;
72
73 /* List of hooks triggered on cgraph_edge events. */
74 struct cgraph_edge_hook_list {
75 cgraph_edge_hook hook;
76 void *data;
77 struct cgraph_edge_hook_list *next;
78 };
79
80 /* List of hooks triggered on cgraph_node events. */
81 struct cgraph_node_hook_list {
82 cgraph_node_hook hook;
83 void *data;
84 struct cgraph_node_hook_list *next;
85 };
86
87 /* List of hooks triggered on events involving two cgraph_edges. */
88 struct cgraph_2edge_hook_list {
89 cgraph_2edge_hook hook;
90 void *data;
91 struct cgraph_2edge_hook_list *next;
92 };
93
94 /* List of hooks triggered on events involving two cgraph_nodes. */
95 struct cgraph_2node_hook_list {
96 cgraph_2node_hook hook;
97 void *data;
98 struct cgraph_2node_hook_list *next;
99 };
100
101 /* Hash descriptor for cgraph_function_version_info. */
102
103 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
104 {
105 static hashval_t hash (cgraph_function_version_info *);
106 static bool equal (cgraph_function_version_info *,
107 cgraph_function_version_info *);
108 };
109
110 /* Map a cgraph_node to cgraph_function_version_info using this htab.
111 The cgraph_function_version_info has a THIS_NODE field that is the
112 corresponding cgraph_node. */
113
114 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
115
116 /* Hash function for cgraph_fnver_htab. */
117 hashval_t
118 function_version_hasher::hash (cgraph_function_version_info *ptr)
119 {
120 int uid = ptr->this_node->uid;
121 return (hashval_t)(uid);
122 }
123
124 /* eq function for cgraph_fnver_htab. */
125 bool
126 function_version_hasher::equal (cgraph_function_version_info *n1,
127 cgraph_function_version_info *n2)
128 {
129 return n1->this_node->uid == n2->this_node->uid;
130 }
131
132 /* Mark all allocated nodes as GC roots. */
133 static GTY(()) struct cgraph_function_version_info *
134 version_info_node = NULL;
135
136 /* Return true if NODE's address can be compared. */
137
138 bool
139 symtab_node::address_can_be_compared_p ()
140 {
141 /* Addresses of virtual tables and of virtual functions are never compared. */
142 if (DECL_VIRTUAL_P (decl))
143 return false;
144 /* Addresses of C++ cdtors are never compared. */
145 if (is_a <cgraph_node *> (this)
146 && (DECL_CXX_CONSTRUCTOR_P (decl)
147 || DECL_CXX_DESTRUCTOR_P (decl)))
148 return false;
149 /* Addresses of constant pool symbols are never compared;
150 flag_merge_constants permits us to assume the same for read-only variables. */
151 if (is_a <varpool_node *> (this)
152 && (DECL_IN_CONSTANT_POOL (decl)
153 || (flag_merge_constants >= 2
154 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
155 return false;
156 return true;
157 }
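As a concrete case for the flag_merge_constants rule above, consider this illustrative C input (names are made up); with -fmerge-all-constants, i.e. flag_merge_constants >= 2, identical read-only objects may be merged, so their addresses are assumed not to be meaningfully compared:

    /* With -fmerge-all-constants, &a == &b may become true, so code must
       not rely on the two addresses being distinct.  */
    static const int a = 42;
    static const int b = 42;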
158
159 /* Get the cgraph_function_version_info node corresponding to node. */
160 cgraph_function_version_info *
161 cgraph_node::function_version (void)
162 {
163 cgraph_function_version_info key;
164 key.this_node = this;
165
166 if (cgraph_fnver_htab == NULL)
167 return NULL;
168
169 return cgraph_fnver_htab->find (&key);
170 }
171
172 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
173 corresponding to this cgraph_node. */
174 cgraph_function_version_info *
175 cgraph_node::insert_new_function_version (void)
176 {
177 version_info_node = NULL;
178 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
179 version_info_node->this_node = this;
180
181 if (cgraph_fnver_htab == NULL)
182 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
183
184 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
185 = version_info_node;
186 return version_info_node;
187 }
188
189 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
190 DECL is a duplicate declaration. */
191 void
192 cgraph_node::delete_function_version (tree decl)
193 {
194 cgraph_node *decl_node = cgraph_node::get (decl);
195 cgraph_function_version_info *decl_v = NULL;
196
197 if (decl_node == NULL)
198 return;
199
200 decl_v = decl_node->function_version ();
201
202 if (decl_v == NULL)
203 return;
204
205 if (decl_v->prev != NULL)
206 decl_v->prev->next = decl_v->next;
207
208 if (decl_v->next != NULL)
209 decl_v->next->prev = decl_v->prev;
210
211 if (cgraph_fnver_htab != NULL)
212 cgraph_fnver_htab->remove_elt (decl_v);
213
214 decl_node->remove ();
215 }
216
217 /* Record that DECL1 and DECL2 are semantically identical function
218 versions. */
219 void
220 cgraph_node::record_function_versions (tree decl1, tree decl2)
221 {
222 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
223 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
224 cgraph_function_version_info *decl1_v = NULL;
225 cgraph_function_version_info *decl2_v = NULL;
226 cgraph_function_version_info *before;
227 cgraph_function_version_info *after;
228
229 gcc_assert (decl1_node != NULL && decl2_node != NULL);
230 decl1_v = decl1_node->function_version ();
231 decl2_v = decl2_node->function_version ();
232
233 if (decl1_v != NULL && decl2_v != NULL)
234 return;
235
236 if (decl1_v == NULL)
237 decl1_v = decl1_node->insert_new_function_version ();
238
239 if (decl2_v == NULL)
240 decl2_v = decl2_node->insert_new_function_version ();
241
242 /* Chain decl2_v and decl1_v. All semantically identical versions
243 will be chained together. */
244
245 before = decl1_v;
246 after = decl2_v;
247
248 while (before->next != NULL)
249 before = before->next;
250
251 while (after->prev != NULL)
252 after = after->prev;
253
254 before->next = after;
255 after->prev = before;
256 }
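A minimal sketch of how a front end uses the chaining above; the wrapper function and the three decls below are hypothetical:

    /* Illustrative only: record three hypothetical versions of one function.  */
    static void
    record_three_versions (tree foo_default, tree foo_sse4, tree foo_avx2)
    {
      cgraph_node::record_function_versions (foo_default, foo_sse4);
      cgraph_node::record_function_versions (foo_default, foo_avx2);
      /* All three decls now share one doubly linked chain of
         cgraph_function_version_info nodes; any of them reaches the others
         through cgraph_node::get (decl)->function_version () and the
         prev/next pointers.  */
    }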
257
258 /* Initialize callgraph dump file. */
259
260 void
261 symbol_table::initialize (void)
262 {
263 if (!dump_file)
264 dump_file = dump_begin (TDI_cgraph, NULL);
265 }
266
267 /* Allocate new callgraph node and insert it into basic data structures. */
268
269 cgraph_node *
270 symbol_table::create_empty (void)
271 {
272 cgraph_node *node = allocate_cgraph_symbol ();
273
274 node->type = SYMTAB_FUNCTION;
275 node->frequency = NODE_FREQUENCY_NORMAL;
276 node->count_materialization_scale = REG_BR_PROB_BASE;
277 cgraph_count++;
278
279 return node;
280 }
281
282 /* Register HOOK to be called with DATA on each removed edge. */
283 cgraph_edge_hook_list *
284 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
285 {
286 cgraph_edge_hook_list *entry;
287 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
288
289 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
290 entry->hook = hook;
291 entry->data = data;
292 entry->next = NULL;
293 while (*ptr)
294 ptr = &(*ptr)->next;
295 *ptr = entry;
296 return entry;
297 }
298
299 /* Remove ENTRY from the list of hooks called on removing edges. */
300 void
301 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
302 {
303 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
304
305 while (*ptr != entry)
306 ptr = &(*ptr)->next;
307 *ptr = entry->next;
308 free (entry);
309 }
310
311 /* Call all edge removal hooks. */
312 void
313 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
314 {
315 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
316 while (entry)
317 {
318 entry->hook (e, entry->data);
319 entry = entry->next;
320 }
321 }
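For reference, a pass that maintains per-edge data would typically use these hooks along the following lines; the callback, holder and startup/teardown functions below are hypothetical:

    /* Hypothetical callback invoked for every call graph edge being removed;
       DATA is the cookie supplied at registration time.  */
    static void
    my_edge_removal_callback (cgraph_edge *e, void *data)
    {
      (void) e;
      (void) data;
      /* Drop any per-edge summary this pass keeps for E here.  */
    }

    static cgraph_edge_hook_list *my_edge_removal_holder;

    /* Registration, e.g. at pass startup ...  */
    static void
    my_pass_startup (void)
    {
      my_edge_removal_holder
        = symtab->add_edge_removal_hook (my_edge_removal_callback, NULL);
    }

    /* ... and teardown once the notifications are no longer needed.  */
    static void
    my_pass_teardown (void)
    {
      symtab->remove_edge_removal_hook (my_edge_removal_holder);
    }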
322
323 /* Register HOOK to be called with DATA on each removed node. */
324 cgraph_node_hook_list *
325 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
326 {
327 cgraph_node_hook_list *entry;
328 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
329
330 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
331 entry->hook = hook;
332 entry->data = data;
333 entry->next = NULL;
334 while (*ptr)
335 ptr = &(*ptr)->next;
336 *ptr = entry;
337 return entry;
338 }
339
340 /* Remove ENTRY from the list of hooks called on removing nodes. */
341 void
342 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
343 {
344 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
345
346 while (*ptr != entry)
347 ptr = &(*ptr)->next;
348 *ptr = entry->next;
349 free (entry);
350 }
351
352 /* Call all node removal hooks. */
353 void
354 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
355 {
356 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
357 while (entry)
358 {
359 entry->hook (node, entry->data);
360 entry = entry->next;
361 }
362 }
363
364 /* Call all node insertion hooks. */
365 void
366 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
367 {
368 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
369 while (entry)
370 {
371 entry->hook (node, entry->data);
372 entry = entry->next;
373 }
374 }
375
376
377 /* Register HOOK to be called with DATA on each inserted node. */
378 cgraph_node_hook_list *
379 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
380 {
381 cgraph_node_hook_list *entry;
382 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
383
384 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
385 entry->hook = hook;
386 entry->data = data;
387 entry->next = NULL;
388 while (*ptr)
389 ptr = &(*ptr)->next;
390 *ptr = entry;
391 return entry;
392 }
393
394 /* Remove ENTRY from the list of hooks called on inserted nodes. */
395 void
396 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
397 {
398 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
399
400 while (*ptr != entry)
401 ptr = &(*ptr)->next;
402 *ptr = entry->next;
403 free (entry);
404 }
405
406 /* Register HOOK to be called with DATA on each duplicated edge. */
407 cgraph_2edge_hook_list *
408 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
409 {
410 cgraph_2edge_hook_list *entry;
411 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
412
413 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
414 entry->hook = hook;
415 entry->data = data;
416 entry->next = NULL;
417 while (*ptr)
418 ptr = &(*ptr)->next;
419 *ptr = entry;
420 return entry;
421 }
422
423 /* Remove ENTRY from the list of hooks called on duplicating edges. */
424 void
425 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
426 {
427 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
428
429 while (*ptr != entry)
430 ptr = &(*ptr)->next;
431 *ptr = entry->next;
432 free (entry);
433 }
434
435 /* Call all edge duplication hooks. */
436 void
437 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
438 {
439 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
440 while (entry)
441 {
442 entry->hook (cs1, cs2, entry->data);
443 entry = entry->next;
444 }
445 }
446
447 /* Register HOOK to be called with DATA on each duplicated node. */
448 cgraph_2node_hook_list *
449 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
450 {
451 cgraph_2node_hook_list *entry;
452 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
453
454 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
455 entry->hook = hook;
456 entry->data = data;
457 entry->next = NULL;
458 while (*ptr)
459 ptr = &(*ptr)->next;
460 *ptr = entry;
461 return entry;
462 }
463
464 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
465 void
466 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
467 {
468 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
469
470 while (*ptr != entry)
471 ptr = &(*ptr)->next;
472 *ptr = entry->next;
473 free (entry);
474 }
475
476 /* Call all node duplication hooks. */
477 void
478 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
479 cgraph_node *node2)
480 {
481 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
482 while (entry)
483 {
484 entry->hook (node, node2, entry->data);
485 entry = entry->next;
486 }
487 }
488
489 /* Return cgraph node assigned to DECL. Create new one when needed. */
490
491 cgraph_node *
492 cgraph_node::create (tree decl)
493 {
494 cgraph_node *node = symtab->create_empty ();
495 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
496
497 node->decl = decl;
498
499 if ((flag_openacc || flag_openmp)
500 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
501 {
502 node->offloadable = 1;
503 if (ENABLE_OFFLOADING)
504 g->have_offload = true;
505 }
506
507 node->register_symbol ();
508
509 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
510 {
511 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
512 node->next_nested = node->origin->nested;
513 node->origin->nested = node;
514 }
515 return node;
516 }
517
518 /* Try to find a call graph node for declaration DECL and if it does not exist
519 or if it corresponds to an inline clone, create a new one. */
520
521 cgraph_node *
522 cgraph_node::get_create (tree decl)
523 {
524 cgraph_node *first_clone = cgraph_node::get (decl);
525
526 if (first_clone && !first_clone->global.inlined_to)
527 return first_clone;
528
529 cgraph_node *node = cgraph_node::create (decl);
530 if (first_clone)
531 {
532 first_clone->clone_of = node;
533 node->clones = first_clone;
534 symtab->symtab_prevail_in_asm_name_hash (node);
535 node->decl->decl_with_vis.symtab_node = node;
536 if (dump_file)
537 fprintf (dump_file, "Introduced new external node "
538 "(%s/%i) and turned into root of the clone tree.\n",
539 node->name (), node->order);
540 }
541 else if (dump_file)
542 fprintf (dump_file, "Introduced new external node "
543 "(%s/%i).\n", node->name (), node->order);
544 return node;
545 }
546
547 /* Mark ALIAS as an alias to TARGET. TARGET may be another function
548 declaration or an assembler name (an IDENTIFIER_NODE). */
549
550 cgraph_node *
551 cgraph_node::create_alias (tree alias, tree target)
552 {
553 cgraph_node *alias_node;
554
555 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
556 || TREE_CODE (target) == IDENTIFIER_NODE);
557 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
558 alias_node = cgraph_node::get_create (alias);
559 gcc_assert (!alias_node->definition);
560 alias_node->alias_target = target;
561 alias_node->definition = true;
562 alias_node->alias = true;
563 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
564 alias_node->transparent_alias = alias_node->weakref = true;
565 return alias_node;
566 }
567
568 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
569 and NULL otherwise.
570 Same body aliases are output whenever the body of DECL is output,
571 and cgraph_node::get (ALIAS) transparently returns
572 cgraph_node::get (DECL). */
573
574 cgraph_node *
575 cgraph_node::create_same_body_alias (tree alias, tree decl)
576 {
577 cgraph_node *n;
578 #ifndef ASM_OUTPUT_DEF
579 /* If aliases aren't supported by the assembler, fail. */
580 return NULL;
581 #endif
582 /* Langhooks can create same body aliases of symbols not defined.
583 Those are useless. Drop them on the floor. */
584 if (symtab->global_info_ready)
585 return NULL;
586
587 n = cgraph_node::create_alias (alias, decl);
588 n->cpp_implicit_alias = true;
589 if (symtab->cpp_implicit_aliases_done)
590 n->resolve_alias (cgraph_node::get (decl));
591 return n;
592 }
593
594 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
595 aliases DECL with adjustments made to the first parameter.
596 See the comments in thunk_adjust for details on the parameters. */
597
598 cgraph_node *
599 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
600 HOST_WIDE_INT fixed_offset,
601 HOST_WIDE_INT virtual_value,
602 tree virtual_offset,
603 tree real_alias)
604 {
605 cgraph_node *node;
606
607 node = cgraph_node::get (alias);
608 if (node)
609 node->reset ();
610 else
611 node = cgraph_node::create (alias);
612 gcc_checking_assert (!virtual_offset
613 || wi::eq_p (virtual_offset, virtual_value));
614 node->thunk.fixed_offset = fixed_offset;
615 node->thunk.this_adjusting = this_adjusting;
616 node->thunk.virtual_value = virtual_value;
617 node->thunk.virtual_offset_p = virtual_offset != NULL;
618 node->thunk.alias = real_alias;
619 node->thunk.thunk_p = true;
620 node->definition = true;
621
622 return node;
623 }
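Thunks of the kind created here typically come from C++ multiple inheritance; a minimal sketch of source code that produces a this-adjusting thunk (class and member names are made up):

    struct A { virtual int f () { return 1; } int a; };
    struct B { virtual int g () { return 2; } int b; };
    struct C : A, B { int g () { return 3; } };

    int
    call_g (B *b)
    {
      /* When B points into a C object, this call dispatches through a
         this-adjusting thunk: the thunk adjusts `this' by the offset of the
         B subobject within C before transferring control to C::g.  */
      return b->g ();
    }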
624
625 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
626 Return NULL if there's no such node. */
627
628 cgraph_node *
629 cgraph_node::get_for_asmname (tree asmname)
630 {
631 /* We do not want to look at inline clones. */
632 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
633 node;
634 node = node->next_sharing_asm_name)
635 {
636 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
637 if (cn && !cn->global.inlined_to)
638 return cn;
639 }
640 return NULL;
641 }
642
643 /* Returns a hash value for X (which really is a cgraph_edge). */
644
645 hashval_t
646 cgraph_edge_hasher::hash (cgraph_edge *e)
647 {
648 /* This is a really poor hash function, but it is what htab_hash_pointer
649 uses. */
650 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
651 }
652
653 /* Returns a hash value for the call statement CALL_STMT. */
654
655 hashval_t
656 cgraph_edge_hasher::hash (gimple *call_stmt)
657 {
658 /* This is a really poor hash function, but it is what htab_hash_pointer
659 uses. */
660 return (hashval_t) ((intptr_t)call_stmt >> 3);
661 }
662
663 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
664
665 inline bool
666 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
667 {
668 return x->call_stmt == y;
669 }
670
671 /* Update the entry for E's call statement in the call site hash of its caller. */
672
673 static inline void
674 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
675 {
676 gimple *call = e->call_stmt;
677 *e->caller->call_site_hash->find_slot_with_hash
678 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
679 }
680
681 /* Add call graph edge E to call site hash of its caller. */
682
683 static inline void
684 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
685 {
686 /* There are two speculative edges for every statement (one direct,
687 one indirect); always hash the direct one. */
688 if (e->speculative && e->indirect_unknown_callee)
689 return;
690 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
691 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
692 if (*slot)
693 {
694 gcc_assert (((cgraph_edge *)*slot)->speculative);
695 if (e->callee)
696 *slot = e;
697 return;
698 }
699 gcc_assert (!*slot || e->speculative);
700 *slot = e;
701 }
702
703 /* Return the callgraph edge representing the GIMPLE_CALL statement
704 CALL_STMT. */
705
706 cgraph_edge *
707 cgraph_node::get_edge (gimple *call_stmt)
708 {
709 cgraph_edge *e, *e2;
710 int n = 0;
711
712 if (call_site_hash)
713 return call_site_hash->find_with_hash
714 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
715
716 /* This loop may turn out to be a performance problem. In that case, adding
717 hash tables to call graph nodes with very many edges is probably the best
718 solution. It is not a good idea to add a pointer to the CALL_EXPR itself
719 because we want to allow multiple cgraph nodes to represent
720 different clones of the same body before the body is actually cloned. */
721 for (e = callees; e; e = e->next_callee)
722 {
723 if (e->call_stmt == call_stmt)
724 break;
725 n++;
726 }
727
728 if (!e)
729 for (e = indirect_calls; e; e = e->next_callee)
730 {
731 if (e->call_stmt == call_stmt)
732 break;
733 n++;
734 }
735
736 if (n > 100)
737 {
738 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
739 for (e2 = callees; e2; e2 = e2->next_callee)
740 cgraph_add_edge_to_call_site_hash (e2);
741 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
742 cgraph_add_edge_to_call_site_hash (e2);
743 }
744
745 return e;
746 }
747
748
749 /* Change the call_stmt field of the edge to NEW_STMT.
750 If UPDATE_SPECULATIVE is set and the edge is any component of a
751 speculative edge, then update all components. */
752
753 void
754 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
755 {
756 tree decl;
757
758 /* Speculative edges have three components; update all of them
759 when asked to. */
760 if (update_speculative && speculative)
761 {
762 cgraph_edge *direct, *indirect;
763 ipa_ref *ref;
764
765 speculative_call_info (direct, indirect, ref);
766 direct->set_call_stmt (new_stmt, false);
767 indirect->set_call_stmt (new_stmt, false);
768 ref->stmt = new_stmt;
769 return;
770 }
771
772 /* Only direct speculative edges go to call_site_hash. */
773 if (caller->call_site_hash
774 && (!speculative || !indirect_unknown_callee))
775 {
776 caller->call_site_hash->remove_elt_with_hash
777 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
778 }
779
780 cgraph_edge *e = this;
781
782 call_stmt = new_stmt;
783 if (indirect_unknown_callee
784 && (decl = gimple_call_fndecl (new_stmt)))
785 {
786 /* Constant propagation (and possibly also inlining?) can turn an
787 indirect call into a direct one. */
788 cgraph_node *new_callee = cgraph_node::get (decl);
789
790 gcc_checking_assert (new_callee);
791 e = make_direct (new_callee);
792 }
793
794 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
795 e->can_throw_external = stmt_can_throw_external (new_stmt);
796 pop_cfun ();
797 if (e->caller->call_site_hash)
798 cgraph_add_edge_to_call_site_hash (e);
799 }
800
801 /* Allocate a cgraph_edge structure and fill it with data according to the
802 parameters, of which only CALLEE can be NULL (when creating an indirect call
803 edge). */
804
805 cgraph_edge *
806 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
807 gcall *call_stmt, gcov_type count, int freq,
808 bool indir_unknown_callee)
809 {
810 cgraph_edge *edge;
811
812 /* LTO does not actually have access to the call_stmt since these
813 have not been loaded yet. */
814 if (call_stmt)
815 {
816 /* This is a rather expensive check possibly triggering
817 construction of call stmt hashtable. */
818 cgraph_edge *e;
819 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
820 || e->speculative);
821
822 gcc_assert (is_gimple_call (call_stmt));
823 }
824
825 if (free_edges)
826 {
827 edge = free_edges;
828 free_edges = NEXT_FREE_EDGE (edge);
829 }
830 else
831 {
832 edge = ggc_alloc<cgraph_edge> ();
833 edge->uid = edges_max_uid++;
834 }
835
836 edges_count++;
837
838 edge->aux = NULL;
839 edge->caller = caller;
840 edge->callee = callee;
841 edge->prev_caller = NULL;
842 edge->next_caller = NULL;
843 edge->prev_callee = NULL;
844 edge->next_callee = NULL;
845 edge->lto_stmt_uid = 0;
846
847 edge->count = count;
848 gcc_assert (count >= 0);
849 edge->frequency = freq;
850 gcc_assert (freq >= 0);
851 gcc_assert (freq <= CGRAPH_FREQ_MAX);
852
853 edge->call_stmt = call_stmt;
854 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
855 edge->can_throw_external
856 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
857 pop_cfun ();
858 if (call_stmt
859 && callee && callee->decl
860 && !gimple_check_call_matching_types (call_stmt, callee->decl,
861 false))
862 edge->call_stmt_cannot_inline_p = true;
863 else
864 edge->call_stmt_cannot_inline_p = false;
865
866 edge->indirect_info = NULL;
867 edge->indirect_inlining_edge = 0;
868 edge->speculative = false;
869 edge->indirect_unknown_callee = indir_unknown_callee;
870 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
871 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
872 edge->in_polymorphic_cdtor
873 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
874 caller->decl);
875 else
876 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
877 if (call_stmt && caller->call_site_hash)
878 cgraph_add_edge_to_call_site_hash (edge);
879
880 return edge;
881 }
882
883 /* Create edge from a given function to CALLEE in the cgraph. */
884
885 cgraph_edge *
886 cgraph_node::create_edge (cgraph_node *callee,
887 gcall *call_stmt, gcov_type count, int freq)
888 {
889 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
890 freq, false);
891
892 initialize_inline_failed (edge);
893
894 edge->next_caller = callee->callers;
895 if (callee->callers)
896 callee->callers->prev_caller = edge;
897 edge->next_callee = callees;
898 if (callees)
899 callees->prev_callee = edge;
900 callees = edge;
901 callee->callers = edge;
902
903 return edge;
904 }
905
906 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
907
908 cgraph_indirect_call_info *
909 cgraph_allocate_init_indirect_info (void)
910 {
911 cgraph_indirect_call_info *ii;
912
913 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
914 ii->param_index = -1;
915 return ii;
916 }
917
918 /* Create an indirect edge with a yet-undetermined callee for the call
919 statement CALL_STMT with flags ECF_FLAGS. If COMPUTE_INDIRECT_INFO is set,
920 record polymorphic call information when the call is virtual. */
921
922 cgraph_edge *
923 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
924 gcov_type count, int freq,
925 bool compute_indirect_info)
926 {
927 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
928 count, freq, true);
929 tree target;
930
931 initialize_inline_failed (edge);
932
933 edge->indirect_info = cgraph_allocate_init_indirect_info ();
934 edge->indirect_info->ecf_flags = ecf_flags;
935 edge->indirect_info->vptr_changed = true;
936
937 /* Record polymorphic call info. */
938 if (compute_indirect_info
939 && call_stmt
940 && (target = gimple_call_fn (call_stmt))
941 && virtual_method_call_p (target))
942 {
943 ipa_polymorphic_call_context context (decl, target, call_stmt);
944
945 /* Only record types can have virtual calls. */
946 edge->indirect_info->polymorphic = true;
947 edge->indirect_info->param_index = -1;
948 edge->indirect_info->otr_token
949 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
950 edge->indirect_info->otr_type = obj_type_ref_class (target);
951 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
952 edge->indirect_info->context = context;
953 }
954
955 edge->next_callee = indirect_calls;
956 if (indirect_calls)
957 indirect_calls->prev_callee = edge;
958 indirect_calls = edge;
959
960 return edge;
961 }
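For context, the polymorphic bookkeeping above corresponds to source-level C++ calls such as the following sketch (class and member names are made up); the call through B reaches this function as an indirect call whose callee expression is an OBJ_TYPE_REF:

    struct Base
    {
      virtual int hook () { return 0; }
    };

    int
    call_hook (Base *b)
    {
      /* This is an indirect call: virtual_method_call_p is true for the
         callee expression, otr_type is the RECORD_TYPE for Base, and
         otr_token selects hook's slot in Base's vtable.  */
      return b->hook ();
    }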
962
963 /* Remove the edge from the list of the callees of the caller. */
964
965 void
966 cgraph_edge::remove_caller (void)
967 {
968 if (prev_callee)
969 prev_callee->next_callee = next_callee;
970 if (next_callee)
971 next_callee->prev_callee = prev_callee;
972 if (!prev_callee)
973 {
974 if (indirect_unknown_callee)
975 caller->indirect_calls = next_callee;
976 else
977 caller->callees = next_callee;
978 }
979 if (caller->call_site_hash)
980 caller->call_site_hash->remove_elt_with_hash
981 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
982 }
983
984 /* Put the edge onto the free list. */
985
986 void
987 symbol_table::free_edge (cgraph_edge *e)
988 {
989 int uid = e->uid;
990
991 if (e->indirect_info)
992 ggc_free (e->indirect_info);
993
994 /* Clear out the edge so we do not dangle pointers. */
995 memset (e, 0, sizeof (*e));
996 e->uid = uid;
997 NEXT_FREE_EDGE (e) = free_edges;
998 free_edges = e;
999 edges_count--;
1000 }
1001
1002 /* Remove the edge in the cgraph. */
1003
1004 void
1005 cgraph_edge::remove (void)
1006 {
1007 /* Call all edge removal hooks. */
1008 symtab->call_edge_removal_hooks (this);
1009
1010 if (!indirect_unknown_callee)
1011 /* Remove from callers list of the callee. */
1012 remove_callee ();
1013
1014 /* Remove from callees list of the callers. */
1015 remove_caller ();
1016
1017 /* Put the edge onto the free list. */
1018 symtab->free_edge (this);
1019 }
1020
1021 /* Turn the edge into a speculative call to N2. Update
1022 the profile so that the direct call is taken DIRECT_COUNT times
1023 with DIRECT_FREQUENCY.
1024
1025 At clone materialization time, the indirect call E will
1026 be expanded as:
1027
1028 if (call_dest == N2)
1029 n2 ();
1030 else
1031 call call_dest
1032
1033 At this time the function just creates the direct call and the
1034 reference representing the if conditional, and attaches them
1035 all to the original indirect call statement.
1036
1037 Return the direct edge created. */
1038
1039 cgraph_edge *
1040 cgraph_edge::make_speculative (cgraph_node *n2, gcov_type direct_count,
1041 int direct_frequency)
1042 {
1043 cgraph_node *n = caller;
1044 ipa_ref *ref = NULL;
1045 cgraph_edge *e2;
1046
1047 if (dump_file)
1048 {
1049 fprintf (dump_file, "Indirect call -> speculative call"
1050 " %s/%i => %s/%i\n",
1051 xstrdup_for_dump (n->name ()), n->order,
1052 xstrdup_for_dump (n2->name ()), n2->order);
1053 }
1054 speculative = true;
1055 e2 = n->create_edge (n2, call_stmt, direct_count, direct_frequency);
1056 initialize_inline_failed (e2);
1057 e2->speculative = true;
1058 if (TREE_NOTHROW (n2->decl))
1059 e2->can_throw_external = false;
1060 else
1061 e2->can_throw_external = can_throw_external;
1062 e2->lto_stmt_uid = lto_stmt_uid;
1063 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1064 count -= e2->count;
1065 frequency -= e2->frequency;
1066 symtab->call_edge_duplication_hooks (this, e2);
1067 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1068 ref->lto_stmt_uid = lto_stmt_uid;
1069 ref->speculative = speculative;
1070 n2->mark_address_taken ();
1071 return e2;
1072 }
1073
1074 /* A speculative call consists of three components:
1075 1) an indirect edge representing the original call,
1076 2) a direct edge representing the new call, and
1077 3) an ADDR_EXPR reference representing the speculative check.
1078 All three components are attached to a single statement (the indirect
1079 call), and if one of them exists, all of them must exist.
1080
1081 Given a speculative call edge, return all three components.
1082 */
1083
1084 void
1085 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1086 cgraph_edge *&indirect,
1087 ipa_ref *&reference)
1088 {
1089 ipa_ref *ref;
1090 int i;
1091 cgraph_edge *e2;
1092 cgraph_edge *e = this;
1093
1094 if (!e->indirect_unknown_callee)
1095 for (e2 = e->caller->indirect_calls;
1096 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1097 e2 = e2->next_callee)
1098 ;
1099 else
1100 {
1101 e2 = e;
1102 /* We can take advantage of the call stmt hash. */
1103 if (e2->call_stmt)
1104 {
1105 e = e->caller->get_edge (e2->call_stmt);
1106 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1107 }
1108 else
1109 for (e = e->caller->callees;
1110 e2->call_stmt != e->call_stmt
1111 || e2->lto_stmt_uid != e->lto_stmt_uid;
1112 e = e->next_callee)
1113 ;
1114 }
1115 gcc_assert (e->speculative && e2->speculative);
1116 direct = e;
1117 indirect = e2;
1118
1119 reference = NULL;
1120 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1121 if (ref->speculative
1122 && ((ref->stmt && ref->stmt == e->call_stmt)
1123 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1124 {
1125 reference = ref;
1126 break;
1127 }
1128
1129 /* A speculative edge always consists of all three components: the direct
1130 edge, the indirect edge and the reference. */
1131
1132 gcc_assert (e && e2 && ref);
1133 }
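A typical use fetches all three components at once, as the rest of this file does; a minimal sketch (the wrapper function is hypothetical):

    /* Illustrative only: given any component EDGE of a speculative call,
       retrieve all three components and sanity-check them.  */
    static void
    examine_speculative_call (cgraph_edge *edge)
    {
      cgraph_edge *direct_edge, *indirect_edge;
      ipa_ref *ref;

      edge->speculative_call_info (direct_edge, indirect_edge, ref);
      /* DIRECT_EDGE->callee is the speculated target, INDIRECT_EDGE keeps the
         original indirect call and REF is the IPA_REF_ADDR used for the
         runtime check.  */
      gcc_assert (direct_edge->speculative
                  && indirect_edge->speculative
                  && ref->speculative);
    }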
1134
1135 /* The speculative call edge turned out to be a direct call to CALLEE_DECL.
1136 Remove the speculative call sequence and return the edge representing the call.
1137 It is up to the caller to redirect the call as appropriate. */
1138
1139 cgraph_edge *
1140 cgraph_edge::resolve_speculation (tree callee_decl)
1141 {
1142 cgraph_edge *edge = this;
1143 cgraph_edge *e2;
1144 ipa_ref *ref;
1145
1146 gcc_assert (edge->speculative);
1147 edge->speculative_call_info (e2, edge, ref);
1148 if (!callee_decl
1149 || !ref->referred->semantically_equivalent_p
1150 (symtab_node::get (callee_decl)))
1151 {
1152 if (dump_file)
1153 {
1154 if (callee_decl)
1155 {
1156 fprintf (dump_file, "Speculative indirect call %s/%i => %s/%i has "
1157 "turned out to have contradicting known target ",
1158 xstrdup_for_dump (edge->caller->name ()),
1159 edge->caller->order,
1160 xstrdup_for_dump (e2->callee->name ()),
1161 e2->callee->order);
1162 print_generic_expr (dump_file, callee_decl, 0);
1163 fprintf (dump_file, "\n");
1164 }
1165 else
1166 {
1167 fprintf (dump_file, "Removing speculative call %s/%i => %s/%i\n",
1168 xstrdup_for_dump (edge->caller->name ()),
1169 edge->caller->order,
1170 xstrdup_for_dump (e2->callee->name ()),
1171 e2->callee->order);
1172 }
1173 }
1174 }
1175 else
1176 {
1177 cgraph_edge *tmp = edge;
1178 if (dump_file)
1179 fprintf (dump_file, "Speculative call turned into direct call.\n");
1180 edge = e2;
1181 e2 = tmp;
1182 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1183 in the functions inlined through it. */
1184 }
1185 edge->count += e2->count;
1186 edge->frequency += e2->frequency;
1187 if (edge->frequency > CGRAPH_FREQ_MAX)
1188 edge->frequency = CGRAPH_FREQ_MAX;
1189 edge->speculative = false;
1190 e2->speculative = false;
1191 ref->remove_reference ();
1192 if (e2->indirect_unknown_callee || e2->inline_failed)
1193 e2->remove ();
1194 else
1195 e2->callee->remove_symbol_and_inline_clones ();
1196 if (edge->caller->call_site_hash)
1197 cgraph_update_edge_in_call_site_hash (edge);
1198 return edge;
1199 }
1200
1201 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1202 CALLEE. */
1204
1205 cgraph_edge *
1206 cgraph_edge::make_direct (cgraph_node *callee)
1207 {
1208 cgraph_edge *edge = this;
1209 gcc_assert (indirect_unknown_callee);
1210
1211 /* If we are redirecting speculative call, make it non-speculative. */
1212 if (indirect_unknown_callee && speculative)
1213 {
1214 edge = edge->resolve_speculation (callee->decl);
1215
1216 /* On successful speculation just return the pre-existing direct edge. */
1217 if (!indirect_unknown_callee)
1218 return edge;
1219 }
1220
1221 indirect_unknown_callee = 0;
1222 ggc_free (indirect_info);
1223 indirect_info = NULL;
1224
1225 /* Get the edge out of the indirect edge list. */
1226 if (prev_callee)
1227 prev_callee->next_callee = next_callee;
1228 if (next_callee)
1229 next_callee->prev_callee = prev_callee;
1230 if (!prev_callee)
1231 caller->indirect_calls = next_callee;
1232
1233 /* Put it into the normal callee list. */
1234 prev_callee = NULL;
1235 next_callee = caller->callees;
1236 if (caller->callees)
1237 caller->callees->prev_callee = edge;
1238 caller->callees = edge;
1239
1240 /* Insert to callers list of the new callee. */
1241 edge->set_callee (callee);
1242
1243 if (call_stmt)
1244 call_stmt_cannot_inline_p
1245 = !gimple_check_call_matching_types (call_stmt, callee->decl,
1246 false);
1247
1248 /* We need to re-determine the inlining status of the edge. */
1249 initialize_inline_failed (edge);
1250 return edge;
1251 }
1252
1253 /* If necessary, change the function declaration in the call statement
1254 associated with E so that it corresponds to the edge callee. */
1255
1256 gimple *
1257 cgraph_edge::redirect_call_stmt_to_callee (void)
1258 {
1259 cgraph_edge *e = this;
1260
1261 tree decl = gimple_call_fndecl (e->call_stmt);
1262 tree lhs = gimple_call_lhs (e->call_stmt);
1263 gcall *new_stmt;
1264 gimple_stmt_iterator gsi;
1265 bool skip_bounds = false;
1266
1267 if (e->speculative)
1268 {
1269 cgraph_edge *e2;
1270 gcall *new_stmt;
1271 ipa_ref *ref;
1272
1273 e->speculative_call_info (e, e2, ref);
1274 /* If there already is a direct call (i.e. as a result of the inliner's
1275 substitution), forget about speculating. */
1276 if (decl)
1277 e = e->resolve_speculation (decl);
1278 /* If types do not match, speculation was likely wrong.
1279 The direct edge was possibly redirected to the clone with a different
1280 signature. We did not update the call statement yet, so compare it
1281 with the reference that still points to the proper type. */
1282 else if (!gimple_check_call_matching_types (e->call_stmt,
1283 ref->referred->decl,
1284 true))
1285 {
1286 if (dump_file)
1287 fprintf (dump_file, "Not expanding speculative call of %s/%i -> %s/%i\n"
1288 "Type mismatch.\n",
1289 xstrdup_for_dump (e->caller->name ()),
1290 e->caller->order,
1291 xstrdup_for_dump (e->callee->name ()),
1292 e->callee->order);
1293 e = e->resolve_speculation ();
1294 /* We are producing the final function body and will throw away the
1295 callgraph edges really soon. Reset the counts/frequencies to
1296 keep verifier happy in the case of roundoff errors. */
1297 e->count = gimple_bb (e->call_stmt)->count;
1298 e->frequency = compute_call_stmt_bb_frequency
1299 (e->caller->decl, gimple_bb (e->call_stmt));
1300 }
1301 /* Expand speculation into GIMPLE code. */
1302 else
1303 {
1304 if (dump_file)
1305 fprintf (dump_file,
1306 "Expanding speculative call of %s/%i -> %s/%i count:"
1307 "%" PRId64"\n",
1308 xstrdup_for_dump (e->caller->name ()),
1309 e->caller->order,
1310 xstrdup_for_dump (e->callee->name ()),
1311 e->callee->order,
1312 (int64_t)e->count);
1313 gcc_assert (e2->speculative);
1314 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1315 new_stmt = gimple_ic (e->call_stmt,
1316 dyn_cast<cgraph_node *> (ref->referred),
1317 e->count || e2->count
1318 ? RDIV (e->count * REG_BR_PROB_BASE,
1319 e->count + e2->count)
1320 : e->frequency || e2->frequency
1321 ? RDIV (e->frequency * REG_BR_PROB_BASE,
1322 e->frequency + e2->frequency)
1323 : REG_BR_PROB_BASE / 2,
1324 e->count, e->count + e2->count);
1325 e->speculative = false;
1326 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1327 false);
1328
1329 /* Fix edges for BUILT_IN_CHKP_BNDRET calls attached to the
1330 processed call stmt. */
1331 if (gimple_call_with_bounds_p (new_stmt)
1332 && gimple_call_lhs (new_stmt)
1333 && chkp_retbnd_call_by_val (gimple_call_lhs (e2->call_stmt)))
1334 {
1335 tree dresult = gimple_call_lhs (new_stmt);
1336 tree iresult = gimple_call_lhs (e2->call_stmt);
1337 gcall *dbndret = chkp_retbnd_call_by_val (dresult);
1338 gcall *ibndret = chkp_retbnd_call_by_val (iresult);
1339 struct cgraph_edge *iedge
1340 = e2->caller->cgraph_node::get_edge (ibndret);
1341 struct cgraph_edge *dedge;
1342
1343 if (dbndret)
1344 {
1345 dedge = iedge->caller->create_edge (iedge->callee,
1346 dbndret, e->count,
1347 e->frequency);
1348 dedge->frequency = compute_call_stmt_bb_frequency
1349 (dedge->caller->decl, gimple_bb (dedge->call_stmt));
1350 }
1351 iedge->frequency = compute_call_stmt_bb_frequency
1352 (iedge->caller->decl, gimple_bb (iedge->call_stmt));
1353 }
1354
1355 e->frequency = compute_call_stmt_bb_frequency
1356 (e->caller->decl, gimple_bb (e->call_stmt));
1357 e2->frequency = compute_call_stmt_bb_frequency
1358 (e2->caller->decl, gimple_bb (e2->call_stmt));
1359 e2->speculative = false;
1360 ref->speculative = false;
1361 ref->stmt = NULL;
1362 /* Only one of the two speculative edges is kept in the call site hash;
1363 make sure it gets updated. */
1364 if (e->caller->call_site_hash)
1365 cgraph_update_edge_in_call_site_hash (e2);
1366 pop_cfun ();
1367 /* Continue redirecting E to proper target. */
1368 }
1369 }
1370
1371 /* We might propagate an instrumented function pointer into a
1372 non-instrumented function and vice versa. In such a
1373 case we need to either fix the function declaration or
1374 remove bounds from the call statement. */
1375 if (flag_check_pointer_bounds && e->callee)
1376 skip_bounds = chkp_redirect_edge (e);
1377
1378 if (e->indirect_unknown_callee
1379 || (decl == e->callee->decl
1380 && !skip_bounds))
1381 return e->call_stmt;
1382
1383 if (flag_checking && decl)
1384 {
1385 cgraph_node *node = cgraph_node::get (decl);
1386 gcc_assert (!node || !node->clone.combined_args_to_skip);
1387 }
1388
1389 if (symtab->dump_file)
1390 {
1391 fprintf (symtab->dump_file, "updating call of %s/%i -> %s/%i: ",
1392 xstrdup_for_dump (e->caller->name ()), e->caller->order,
1393 xstrdup_for_dump (e->callee->name ()), e->callee->order);
1394 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1395 if (e->callee->clone.combined_args_to_skip)
1396 {
1397 fprintf (symtab->dump_file, " combined args to skip: ");
1398 dump_bitmap (symtab->dump_file,
1399 e->callee->clone.combined_args_to_skip);
1400 }
1401 }
1402
1403 if (e->callee->clone.combined_args_to_skip
1404 || skip_bounds)
1405 {
1406 int lp_nr;
1407
1408 new_stmt = e->call_stmt;
1409 if (e->callee->clone.combined_args_to_skip)
1410 new_stmt
1411 = gimple_call_copy_skip_args (new_stmt,
1412 e->callee->clone.combined_args_to_skip);
1413 if (skip_bounds)
1414 new_stmt = chkp_copy_call_skip_bounds (new_stmt);
1415
1416 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1417 gimple_call_set_fntype (new_stmt, gimple_call_fntype (e->call_stmt));
1418
1419 if (gimple_vdef (new_stmt)
1420 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1421 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1422
1423 gsi = gsi_for_stmt (e->call_stmt);
1424
1425 /* For optimized-away parameters, add on the caller side,
1426 before the call,
1427 DEBUG D#X => parm_Y(D)
1428 stmts and associate D#X with PARM in the decl_debug_args_lookup
1429 vector, so that the debug info says that if parameter PARM had been
1430 passed, it would have value parm_Y(D). */
1431 if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_STMTS)
1432 {
1433 vec<tree, va_gc> **debug_args
1434 = decl_debug_args_lookup (e->callee->decl);
1435 tree old_decl = gimple_call_fndecl (e->call_stmt);
1436 if (debug_args && old_decl)
1437 {
1438 tree parm;
1439 unsigned i = 0, num;
1440 unsigned len = vec_safe_length (*debug_args);
1441 unsigned nargs = gimple_call_num_args (e->call_stmt);
1442 for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1443 parm && num < nargs;
1444 parm = DECL_CHAIN (parm), num++)
1445 if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1446 && is_gimple_reg (parm))
1447 {
1448 unsigned last = i;
1449
1450 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1451 i += 2;
1452 if (i >= len)
1453 {
1454 i = 0;
1455 while (i < last
1456 && (**debug_args)[i] != DECL_ORIGIN (parm))
1457 i += 2;
1458 if (i >= last)
1459 continue;
1460 }
1461 tree ddecl = (**debug_args)[i + 1];
1462 tree arg = gimple_call_arg (e->call_stmt, num);
1463 if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1464 TREE_TYPE (arg)))
1465 {
1466 tree rhs1;
1467 if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1468 continue;
1469 if (TREE_CODE (arg) == SSA_NAME
1470 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1471 && (rhs1
1472 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1473 && useless_type_conversion_p (TREE_TYPE (ddecl),
1474 TREE_TYPE (rhs1)))
1475 arg = rhs1;
1476 else
1477 arg = fold_convert (TREE_TYPE (ddecl), arg);
1478 }
1479
1480 gimple *def_temp
1481 = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1482 e->call_stmt);
1483 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1484 }
1485 }
1486 }
1487
1488 gsi_replace (&gsi, new_stmt, false);
1489 /* We need to defer cleaning EH info on the new statement to
1490 fixup-cfg. We may not have dominator information at this point
1491 and thus would end up with unreachable blocks and have no way
1492 to communicate that we need to run CFG cleanup then. */
1493 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1494 if (lp_nr != 0)
1495 {
1496 remove_stmt_from_eh_lp (e->call_stmt);
1497 add_stmt_to_eh_lp (new_stmt, lp_nr);
1498 }
1499 }
1500 else
1501 {
1502 new_stmt = e->call_stmt;
1503 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1504 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1505 }
1506
1507 /* If the call becomes noreturn, remove the LHS if possible. */
1508 if (lhs
1509 && (gimple_call_flags (new_stmt) & ECF_NORETURN)
1510 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
1511 {
1512 if (TREE_CODE (lhs) == SSA_NAME)
1513 {
1514 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1515 TREE_TYPE (lhs), NULL);
1516 var = get_or_create_ssa_default_def
1517 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1518 gimple *set_stmt = gimple_build_assign (lhs, var);
1519 gsi = gsi_for_stmt (new_stmt);
1520 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1521 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1522 }
1523 gimple_call_set_lhs (new_stmt, NULL_TREE);
1524 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1525 }
1526
1527 /* If new callee has no static chain, remove it. */
1528 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1529 {
1530 gimple_call_set_chain (new_stmt, NULL);
1531 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1532 }
1533
1534 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1535 new_stmt);
1536
1537 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1538
1539 if (symtab->dump_file)
1540 {
1541 fprintf (symtab->dump_file, " updated to:");
1542 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1543 }
1544 return new_stmt;
1545 }
1546
1547 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1548 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1549 of OLD_STMT if it was previously a call statement.
1550 If NEW_STMT is NULL, the call has been dropped without any
1551 replacement. */
1552
1553 static void
1554 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1555 gimple *old_stmt, tree old_call,
1556 gimple *new_stmt)
1557 {
1558 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1559 ? gimple_call_fndecl (new_stmt) : 0;
1560
1561 /* If both the old and the new call are indirect, there is nothing to update. */
1562 if (!new_call && !old_call)
1563 return;
1564 /* See if we turned an indirect call into a direct call, or folded a call
1565 to one builtin into a call to a different builtin. */
1566 if (old_call != new_call)
1567 {
1568 cgraph_edge *e = node->get_edge (old_stmt);
1569 cgraph_edge *ne = NULL;
1570 gcov_type count;
1571 int frequency;
1572
1573 if (e)
1574 {
1575 /* Keep calls marked as dead dead. */
1576 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1577 && DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
1578 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
1579 {
1580 node->get_edge (old_stmt)->set_call_stmt
1581 (as_a <gcall *> (new_stmt));
1582 return;
1583 }
1584 /* See if the edge is already there and has the correct callee. It
1585 might be so because indirect inlining has already updated
1586 it. We also might have cloned and redirected the edge. */
1587 if (new_call && e->callee)
1588 {
1589 cgraph_node *callee = e->callee;
1590 while (callee)
1591 {
1592 if (callee->decl == new_call
1593 || callee->former_clone_of == new_call)
1594 {
1595 e->set_call_stmt (as_a <gcall *> (new_stmt));
1596 return;
1597 }
1598 callee = callee->clone_of;
1599 }
1600 }
1601
1602 /* Otherwise remove the edge and create a new one; we can't simply redirect
1603 since the function has changed, so the inline plan and other information
1604 attached to the edge is invalid. */
1605 count = e->count;
1606 frequency = e->frequency;
1607 if (e->indirect_unknown_callee || e->inline_failed)
1608 e->remove ();
1609 else
1610 e->callee->remove_symbol_and_inline_clones ();
1611 }
1612 else if (new_call)
1613 {
1614 /* We are seeing new direct call; compute profile info based on BB. */
1615 basic_block bb = gimple_bb (new_stmt);
1616 count = bb->count;
1617 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1618 bb);
1619 }
1620
1621 if (new_call)
1622 {
1623 ne = node->create_edge (cgraph_node::get_create (new_call),
1624 as_a <gcall *> (new_stmt), count,
1625 frequency);
1626 gcc_assert (ne->inline_failed);
1627 }
1628 }
1629 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1630 else if (old_stmt != new_stmt)
1631 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1632 }
1633
1634 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1635 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1636 of OLD_STMT before it was updated (updating can happen in place). */
1637
1638 void
1639 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1640 gimple *new_stmt)
1641 {
1642 cgraph_node *orig = cgraph_node::get (cfun->decl);
1643 cgraph_node *node;
1644
1645 gcc_checking_assert (orig);
1646 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1647 if (orig->clones)
1648 for (node = orig->clones; node != orig;)
1649 {
1650 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1651 if (node->clones)
1652 node = node->clones;
1653 else if (node->next_sibling_clone)
1654 node = node->next_sibling_clone;
1655 else
1656 {
1657 while (node != orig && !node->next_sibling_clone)
1658 node = node->clone_of;
1659 if (node != orig)
1660 node = node->next_sibling_clone;
1661 }
1662 }
1663 }
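The loop above is a stack-less preorder walk of the clone tree rooted at ORIG: descend into clones first, then advance to next_sibling_clone, and climb back through clone_of when a subtree is exhausted. The same skeleton, factored into a hypothetical helper with a VISIT callback standing in for the per-node work:

    /* Illustrative only: preorder walk over the clone tree of ORIG,
       calling VISIT on every clone.  */
    static void
    walk_clone_tree (cgraph_node *orig, void (*visit) (cgraph_node *))
    {
      for (cgraph_node *node = orig->clones; node && node != orig;)
        {
          visit (node);
          if (node->clones)                     /* Go down into the clones.  */
            node = node->clones;
          else if (node->next_sibling_clone)    /* Go to the next sibling.  */
            node = node->next_sibling_clone;
          else                                  /* Climb up until a sibling exists.  */
            {
              while (node != orig && !node->next_sibling_clone)
                node = node->clone_of;
              if (node != orig)
                node = node->next_sibling_clone;
            }
        }
    }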
1664
1665
1666 /* Remove all callees from the node. */
1667
1668 void
1669 cgraph_node::remove_callees (void)
1670 {
1671 cgraph_edge *e, *f;
1672
1673 /* It is sufficient to remove the edges from the lists of callers of
1674 the callees. The callee list of the node can be zapped with one
1675 assignment. */
1676 for (e = callees; e; e = f)
1677 {
1678 f = e->next_callee;
1679 symtab->call_edge_removal_hooks (e);
1680 if (!e->indirect_unknown_callee)
1681 e->remove_callee ();
1682 symtab->free_edge (e);
1683 }
1684 for (e = indirect_calls; e; e = f)
1685 {
1686 f = e->next_callee;
1687 symtab->call_edge_removal_hooks (e);
1688 if (!e->indirect_unknown_callee)
1689 e->remove_callee ();
1690 symtab->free_edge (e);
1691 }
1692 indirect_calls = NULL;
1693 callees = NULL;
1694 if (call_site_hash)
1695 {
1696 call_site_hash->empty ();
1697 call_site_hash = NULL;
1698 }
1699 }
1700
1701 /* Remove all callers from the node. */
1702
1703 void
1704 cgraph_node::remove_callers (void)
1705 {
1706 cgraph_edge *e, *f;
1707
1708 /* It is sufficient to remove the edges from the lists of callees of
1709 the callers. The caller list of the node can be zapped with one
1710 assignment. */
1711 for (e = callers; e; e = f)
1712 {
1713 f = e->next_caller;
1714 symtab->call_edge_removal_hooks (e);
1715 e->remove_caller ();
1716 symtab->free_edge (e);
1717 }
1718 callers = NULL;
1719 }
1720
1721 /* Helper function for cgraph_node::release_body and free_lang_data.
1722 It releases the body of function DECL without having to inspect its
1723 possibly non-existent symtab node. */
1724
1725 void
1726 release_function_body (tree decl)
1727 {
1728 function *fn = DECL_STRUCT_FUNCTION (decl);
1729 if (fn)
1730 {
1731 if (fn->cfg
1732 || fn->gimple_df)
1733 {
1734 if (fn->cfg
1735 && loops_for_fn (fn))
1736 {
1737 fn->curr_properties &= ~PROP_loops;
1738 loop_optimizer_finalize (fn);
1739 }
1740 if (fn->gimple_df)
1741 {
1742 delete_tree_ssa (fn);
1743 delete_tree_cfg_annotations (fn);
1744 fn->eh = NULL;
1745 }
1746 if (fn->cfg)
1747 {
1748 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1749 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1750 clear_edges (fn);
1751 fn->cfg = NULL;
1752 }
1753 if (fn->value_histograms)
1754 free_histograms (fn);
1755 }
1756 gimple_set_body (decl, NULL);
1757 /* The struct function hangs a lot of data off it that would leak if we
1758 didn't remove all pointers to it. */
1759 ggc_free (fn);
1760 DECL_STRUCT_FUNCTION (decl) = NULL;
1761 }
1762 DECL_SAVED_TREE (decl) = NULL;
1763 }
1764
1765 /* Release the memory used to represent the body of a function.
1766 Use this only for functions that are released before being translated to
1767 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1768 are freed in final.c via free_after_compilation ().
1769 KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk. */
1770
1771 void
1772 cgraph_node::release_body (bool keep_arguments)
1773 {
1774 ipa_transforms_to_apply.release ();
1775 if (!used_as_abstract_origin && symtab->state != PARSING)
1776 {
1777 DECL_RESULT (decl) = NULL;
1778
1779 if (!keep_arguments)
1780 DECL_ARGUMENTS (decl) = NULL;
1781 }
1782 /* If the node is abstract and needed, then do not clear
1783 DECL_INITIAL of its associated function declaration because it's
1784 needed to emit debug info later. */
1785 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1786 DECL_INITIAL (decl) = error_mark_node;
1787 release_function_body (decl);
1788 if (lto_file_data)
1789 {
1790 lto_free_function_in_decl_state_for_node (this);
1791 lto_file_data = NULL;
1792 }
1793 }
1794
1795 /* Remove function from symbol table. */
1796
1797 void
1798 cgraph_node::remove (void)
1799 {
1800 cgraph_node *n;
1801 int uid = this->uid;
1802
1803 symtab->call_cgraph_removal_hooks (this);
1804 remove_callers ();
1805 remove_callees ();
1806 ipa_transforms_to_apply.release ();
1807
1808 /* Incremental inlining accesses removed nodes stored in the postorder list.
1809 */
1810 force_output = false;
1811 forced_by_abi = false;
1812 for (n = nested; n; n = n->next_nested)
1813 n->origin = NULL;
1814 nested = NULL;
1815 if (origin)
1816 {
1817 cgraph_node **node2 = &origin->nested;
1818
1819 while (*node2 != this)
1820 node2 = &(*node2)->next_nested;
1821 *node2 = next_nested;
1822 }
1823 unregister ();
1824 if (prev_sibling_clone)
1825 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1826 else if (clone_of)
1827 clone_of->clones = next_sibling_clone;
1828 if (next_sibling_clone)
1829 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1830 if (clones)
1831 {
1832 cgraph_node *n, *next;
1833
1834 if (clone_of)
1835 {
1836 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1837 n->clone_of = clone_of;
1838 n->clone_of = clone_of;
1839 n->next_sibling_clone = clone_of->clones;
1840 if (clone_of->clones)
1841 clone_of->clones->prev_sibling_clone = n;
1842 clone_of->clones = clones;
1843 }
1844 else
1845 {
1846 /* We are removing a node with clones. This makes the clones inconsistent,
1847 but assume they will be removed subsequently and just keep the clone
1848 tree intact. This can happen in unreachable function removal since
1849 we remove unreachable functions in random order, not by a bottom-up
1850 walk of the clone trees. */
1851 for (n = clones; n; n = next)
1852 {
1853 next = n->next_sibling_clone;
1854 n->next_sibling_clone = NULL;
1855 n->prev_sibling_clone = NULL;
1856 n->clone_of = NULL;
1857 }
1858 }
1859 }
1860
1861 /* While all the clones are removed after being processed, the function
1862 itself is kept in the cgraph even after it is compiled. Check whether
1863 we are done with this body and reclaim it proactively if this is the case.
1864 */
1865 if (symtab->state != LTO_STREAMING)
1866 {
1867 n = cgraph_node::get (decl);
1868 if (!n
1869 || (!n->clones && !n->clone_of && !n->global.inlined_to
1870 && ((symtab->global_info_ready || in_lto_p)
1871 && (TREE_ASM_WRITTEN (n->decl)
1872 || DECL_EXTERNAL (n->decl)
1873 || !n->analyzed
1874 || (!flag_wpa && n->in_other_partition)))))
1875 release_body ();
1876 }
1877 else
1878 {
1879 lto_free_function_in_decl_state_for_node (this);
1880 lto_file_data = NULL;
1881 }
1882
1883 decl = NULL;
1884 if (call_site_hash)
1885 {
1886 call_site_hash->empty ();
1887 call_site_hash = NULL;
1888 }
1889
1890 if (instrumented_version)
1891 {
1892 instrumented_version->instrumented_version = NULL;
1893 instrumented_version = NULL;
1894 }
1895
1896 symtab->release_symbol (this, uid);
1897 }
1898
1899 /* Likewise indicate that a node has its address taken. */
1900
1901 void
1902 cgraph_node::mark_address_taken (void)
1903 {
1904 /* Indirect inlining can figure out that all uses of the address are
1905 inlined. */
1906 if (global.inlined_to)
1907 {
1908 gcc_assert (cfun->after_inlining);
1909 gcc_assert (callers->indirect_inlining_edge);
1910 return;
1911 }
1912 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1913 IPA_REF_ADDR reference exists (and thus it should be set on node
1914 representing alias we take address of) and as a test whether address
1915 of the object was taken (and thus it should be set on node alias is
1916 referring to). We should remove the first use and then remove the
1917 following set. */
1918 address_taken = 1;
1919 cgraph_node *node = ultimate_alias_target ();
1920 node->address_taken = 1;
1921 }
1922
1923 /* Return local info for the compiled function. */
1924
1925 cgraph_local_info *
1926 cgraph_node::local_info (tree decl)
1927 {
1928 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1929 cgraph_node *node = get (decl);
1930 if (!node)
1931 return NULL;
1932 return &node->ultimate_alias_target ()->local;
1933 }
1934
1935 /* Return RTL info for the compiled function. */
1936
1937 cgraph_rtl_info *
1938 cgraph_node::rtl_info (tree decl)
1939 {
1940 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1941 cgraph_node *node = get (decl);
1942 if (!node)
1943 return NULL;
1944 node = node->ultimate_alias_target ();
1945 if (node->decl != current_function_decl
1946 && !TREE_ASM_WRITTEN (node->decl))
1947 return NULL;
1948 /* Allocate it if it doesn't exist. */
1949 if (node->ultimate_alias_target ()->rtl == NULL)
1950 node->ultimate_alias_target ()->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1951 return node->ultimate_alias_target ()->rtl;
1952 }
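
/* Editor's illustrative sketch, not part of the original file: a minimal,
   hedged example of how the local_info accessor above might be used by a
   caller that only has a FUNCTION_DECL.  The helper name is made up for
   illustration; ATTRIBUTE_UNUSED merely silences unused-function warnings.  */

static bool ATTRIBUTE_UNUSED
example_decl_is_local_function (tree fndecl)
{
  /* local_info returns NULL when no cgraph node exists for FNDECL.  */
  cgraph_local_info *info = cgraph_node::local_info (fndecl);
  return info != NULL && info->local;
}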
1953
1954 /* Return a string describing the failure REASON. */
1955
1956 const char*
1957 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1958 {
1959 #undef DEFCIFCODE
1960 #define DEFCIFCODE(code, type, string) string,
1961
1962 static const char *cif_string_table[CIF_N_REASONS] = {
1963 #include "cif-code.def"
1964 };
1965
1966 /* Signedness of an enum type is implementation defined, so cast it
1967 to unsigned before testing. */
1968 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1969 return cif_string_table[reason];
1970 }
1971
1972 /* Return a type describing the failure REASON. */
1973
1974 cgraph_inline_failed_type_t
1975 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1976 {
1977 #undef DEFCIFCODE
1978 #define DEFCIFCODE(code, type, string) type,
1979
1980 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1981 #include "cif-code.def"
1982 };
1983
1984 /* Signedness of an enum type is implementation defined, so cast it
1985 to unsigned before testing. */
1986 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1987 return cif_type_table[reason];
1988 }
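
/* Editor's illustrative sketch, not part of the original file: shows the
   intended pairing of the two lookup functions above when reporting why an
   edge was not inlined.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_report_inline_failure (FILE *f, cgraph_edge *e)
{
  /* inline_failed is zero once the edge has been inlined.  */
  if (e->inline_failed)
    fprintf (f, "call not inlined: %s\n",
	     cgraph_inline_failed_string (e->inline_failed));
}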
1989
1990 /* Names used to print out the availability enum. */
1991 const char * const cgraph_availability_names[] =
1992 {"unset", "not_available", "overwritable", "available", "local"};
1993
1994 /* Output flags of edge to a file F. */
1995
1996 void
1997 cgraph_edge::dump_edge_flags (FILE *f)
1998 {
1999 if (speculative)
2000 fprintf (f, "(speculative) ");
2001 if (!inline_failed)
2002 fprintf (f, "(inlined) ");
2003 if (indirect_inlining_edge)
2004 fprintf (f, "(indirect_inlining) ");
2005 if (count)
2006 fprintf (f, "(%" PRId64"x) ", (int64_t)count);
2007 if (frequency)
2008 fprintf (f, "(%.2f per call) ", frequency / (double)CGRAPH_FREQ_BASE);
2009 if (can_throw_external)
2010 fprintf (f, "(can throw external) ");
2011 }
2012
2013 /* Dump call graph node to file F. */
2014
2015 void
2016 cgraph_node::dump (FILE *f)
2017 {
2018 cgraph_edge *edge;
2019
2020 dump_base (f);
2021
2022 if (global.inlined_to)
2023 fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
2024 xstrdup_for_dump (name ()),
2025 order,
2026 xstrdup_for_dump (global.inlined_to->name ()),
2027 global.inlined_to->order);
2028 if (clone_of)
2029 fprintf (f, " Clone of %s/%i\n",
2030 clone_of->asm_name (),
2031 clone_of->order);
2032 if (symtab->function_flags_ready)
2033 fprintf (f, " Availability: %s\n",
2034 cgraph_availability_names [get_availability ()]);
2035
2036 if (profile_id)
2037 fprintf (f, " Profile id: %i\n",
2038 profile_id);
2039 fprintf (f, " First run: %i\n", tp_first_run);
2040 fprintf (f, " Function flags:");
2041 if (count)
2042 fprintf (f, " executed %" PRId64"x",
2043 (int64_t)count);
2044 if (origin)
2045 fprintf (f, " nested in: %s", origin->asm_name ());
2046 if (gimple_has_body_p (decl))
2047 fprintf (f, " body");
2048 if (process)
2049 fprintf (f, " process");
2050 if (local.local)
2051 fprintf (f, " local");
2052 if (local.redefined_extern_inline)
2053 fprintf (f, " redefined_extern_inline");
2054 if (only_called_at_startup)
2055 fprintf (f, " only_called_at_startup");
2056 if (only_called_at_exit)
2057 fprintf (f, " only_called_at_exit");
2058 if (tm_clone)
2059 fprintf (f, " tm_clone");
2060 if (icf_merged)
2061 fprintf (f, " icf_merged");
2062 if (merged_comdat)
2063 fprintf (f, " merged_comdat");
2064 if (split_part)
2065 fprintf (f, " split_part");
2066 if (indirect_call_target)
2067 fprintf (f, " indirect_call_target");
2068 if (nonfreeing_fn)
2069 fprintf (f, " nonfreeing_fn");
2070 if (DECL_STATIC_CONSTRUCTOR (decl))
2071 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2072 if (DECL_STATIC_DESTRUCTOR (decl))
2073 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2074 if (frequency == NODE_FREQUENCY_HOT)
2075 fprintf (f, " hot");
2076 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2077 fprintf (f, " unlikely_executed");
2078 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2079 fprintf (f, " executed_once");
2084 if (opt_for_fn (decl, optimize_size))
2085 fprintf (f, " optimize_size");
2086 if (parallelized_function)
2087 fprintf (f, " parallelized_function");
2088
2089 fprintf (f, "\n");
2090
2091 if (thunk.thunk_p)
2092 {
2093 fprintf (f, " Thunk");
2094 if (thunk.alias)
2095 fprintf (f, " of %s (asm: %s)",
2096 lang_hooks.decl_printable_name (thunk.alias, 2),
2097 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2098 fprintf (f, " fixed offset %i virtual value %i has "
2099 "virtual offset %i)\n",
2100 (int)thunk.fixed_offset,
2101 (int)thunk.virtual_value,
2102 (int)thunk.virtual_offset_p);
2103 }
2104 if (alias && thunk.alias
2105 && DECL_P (thunk.alias))
2106 {
2107 fprintf (f, " Alias of %s",
2108 lang_hooks.decl_printable_name (thunk.alias, 2));
2109 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2110 fprintf (f, " (asm: %s)",
2111 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2112 fprintf (f, "\n");
2113 }
2114
2115 fprintf (f, " Called by: ");
2116
2117 for (edge = callers; edge; edge = edge->next_caller)
2118 {
2119 fprintf (f, "%s/%i ", edge->caller->asm_name (),
2120 edge->caller->order);
2121 edge->dump_edge_flags (f);
2122 }
2123
2124 fprintf (f, "\n Calls: ");
2125 for (edge = callees; edge; edge = edge->next_callee)
2126 {
2127 fprintf (f, "%s/%i ", edge->callee->asm_name (),
2128 edge->callee->order);
2129 edge->dump_edge_flags (f);
2130 }
2131 fprintf (f, "\n");
2132
2133 for (edge = indirect_calls; edge; edge = edge->next_callee)
2134 {
2135 if (edge->indirect_info->polymorphic)
2136 {
2137 fprintf (f, " Polymorphic indirect call of type ");
2138 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2139 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2140 }
2141 else
2142 fprintf (f, " Indirect call");
2143 edge->dump_edge_flags (f);
2144 if (edge->indirect_info->param_index != -1)
2145 {
2146 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2147 if (edge->indirect_info->agg_contents)
2148 fprintf (f, " loaded from %s %s at offset %i",
2149 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2150 edge->indirect_info->by_ref ? "passed by reference":"",
2151 (int)edge->indirect_info->offset);
2152 if (edge->indirect_info->vptr_changed)
2153 fprintf (f, " (vptr maybe changed)");
2154 }
2155 fprintf (f, "\n");
2156 if (edge->indirect_info->polymorphic)
2157 edge->indirect_info->context.dump (f);
2158 }
2159
2160 if (instrumentation_clone)
2161 fprintf (f, " Is instrumented version.\n");
2162 else if (instrumented_version)
2163 fprintf (f, " Has instrumented version.\n");
2164 }
2165
2166 /* Dump call graph node NODE to stderr. */
2167
2168 DEBUG_FUNCTION void
2169 cgraph_node::debug (void)
2170 {
2171 dump (stderr);
2172 }
2173
2174 /* Dump the callgraph to file F. */
2175
2176 void
2177 cgraph_node::dump_cgraph (FILE *f)
2178 {
2179 cgraph_node *node;
2180
2181 fprintf (f, "callgraph:\n\n");
2182 FOR_EACH_FUNCTION (node)
2183 node->dump (f);
2184 }
2185
2186 /* Return true when the DECL can possibly be inlined. */
2187
2188 bool
2189 cgraph_function_possibly_inlined_p (tree decl)
2190 {
2191 if (!symtab->global_info_ready)
2192 return !DECL_UNINLINABLE (decl);
2193 return DECL_POSSIBLY_INLINED (decl);
2194 }
2195
2196 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
2197 void
2198 cgraph_node::unnest (void)
2199 {
2200 gcc_assert (origin);
2201 cgraph_node **node2 = &origin->nested;
2202
2203 while (*node2 != this)
2204 node2 = &(*node2)->next_nested;
2205 *node2 = next_nested;
2206 origin = NULL;
2207 }
2208
2209 /* Return function availability. See cgraph.h for description of individual
2210 return values. */
2211 enum availability
2212 cgraph_node::get_availability (symtab_node *ref)
2213 {
2214 if (ref)
2215 {
2216 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2217 if (cref)
2218 ref = cref->global.inlined_to;
2219 }
2220 enum availability avail;
2221 if (!analyzed)
2222 avail = AVAIL_NOT_AVAILABLE;
2223 else if (local.local)
2224 avail = AVAIL_LOCAL;
2225 else if (global.inlined_to)
2226 avail = AVAIL_AVAILABLE;
2227 else if (transparent_alias)
2228 ultimate_alias_target (&avail, ref);
2229 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
2230 avail = AVAIL_INTERPOSABLE;
2231 else if (!externally_visible)
2232 avail = AVAIL_AVAILABLE;
2233 /* If this is a reference from the symbol itself and there are no aliases, we
2234 may be sure that the symbol was not interposed by something else because
2235 the symbol itself would be unreachable otherwise.
2236
2237 Also comdat groups are always resolved in groups. */
2238 else if ((this == ref && !has_aliases_p ())
2239 || (ref && get_comdat_group ()
2240 && get_comdat_group () == ref->get_comdat_group ()))
2241 avail = AVAIL_AVAILABLE;
2242 /* Inline functions are safe to be analyzed even if their symbol can
2243 be overwritten at runtime. It is not meaningful to enforce any sane
2244 behavior on replacing an inline function by a different body. */
2245 else if (DECL_DECLARED_INLINE_P (decl))
2246 avail = AVAIL_AVAILABLE;
2247
2248 /* If the function can be overwritten, return INTERPOSABLE. Take
2249 care at least of two notable extensions - the COMDAT functions
2250 used to share template instantiations in C++ (this is symmetric
2251 to code cp_cannot_inline_tree_fn and probably shall be shared and
2252 the inlinability hooks completely eliminated). */
2253
2254 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2255 avail = AVAIL_INTERPOSABLE;
2256 else avail = AVAIL_AVAILABLE;
2257
2258 return avail;
2259 }
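
/* Editor's illustrative sketch, not part of the original file: IPA code
   typically gates its assumptions on the availability returned above.  Per
   the cgraph_availability_names table, AVAIL_AVAILABLE and AVAIL_LOCAL are
   the only levels at which the body seen in this unit is known to be the
   one that will execute.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_body_is_reliable (cgraph_node *node, symtab_node *ref)
{
  enum availability avail = node->get_availability (ref);
  /* AVAIL_INTERPOSABLE bodies may be replaced at link or run time.  */
  return avail >= AVAIL_AVAILABLE;
}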
2260
2261 /* Worker for cgraph_node_can_be_local_p. */
2262 static bool
2263 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2264 {
2265 return !(!node->force_output
2266 && ((DECL_COMDAT (node->decl)
2267 && !node->forced_by_abi
2268 && !node->used_from_object_file_p ()
2269 && !node->same_comdat_group)
2270 || !node->externally_visible));
2271 }
2272
2273 /* Return true if cgraph_node can be made local for API change.
2274 Extern inline functions and C++ COMDAT functions can be made local
2275 at the expense of possible code size growth if the function is used in multiple
2276 compilation units. */
2277 bool
2278 cgraph_node::can_be_local_p (void)
2279 {
2280 return (!address_taken
2281 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2282 NULL, true));
2283 }
2284
2285 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2286 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2287 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2288 skipped. */
2289 bool
2290 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2291 (cgraph_node *, void *),
2292 void *data,
2293 bool include_overwritable,
2294 bool exclude_virtual_thunks)
2295 {
2296 cgraph_edge *e;
2297 ipa_ref *ref;
2298
2299 if (callback (this, data))
2300 return true;
2301 FOR_EACH_ALIAS (this, ref)
2302 {
2303 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2304 if (include_overwritable
2305 || alias->get_availability () > AVAIL_INTERPOSABLE)
2306 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2307 include_overwritable,
2308 exclude_virtual_thunks))
2309 return true;
2310 }
2311 if (get_availability () <= AVAIL_INTERPOSABLE)
2312 return false;
2313 for (e = callers; e; e = e->next_caller)
2314 if (e->caller->thunk.thunk_p
2315 && (include_overwritable
2316 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2317 && !(exclude_virtual_thunks
2318 && e->caller->thunk.virtual_offset_p))
2319 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2320 include_overwritable,
2321 exclude_virtual_thunks))
2322 return true;
2323
2324 return false;
2325 }
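
/* Editor's illustrative sketch, not part of the original file: the walker
   above is normally used with a small predicate callback, in the same style
   as nonremovable_p and collect_callers_of_node_1 later in this file.  The
   names below are hypothetical.  */

static bool
example_address_taken_p (cgraph_node *node, void *)
{
  /* Returning true stops the walk and is propagated to the caller.  */
  return node->address_taken;
}

static bool ATTRIBUTE_UNUSED
example_group_has_address_taken (cgraph_node *node)
{
  return node->call_for_symbol_thunks_and_aliases (example_address_taken_p,
						   NULL, true);
}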
2326
2327 /* Worker to make NODE local. */
2328
2329 bool
2330 cgraph_node::make_local (cgraph_node *node, void *)
2331 {
2332 gcc_checking_assert (node->can_be_local_p ());
2333 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2334 {
2335 node->make_decl_local ();
2336 node->set_section (NULL);
2337 node->set_comdat_group (NULL);
2338 node->externally_visible = false;
2339 node->forced_by_abi = false;
2340 node->local.local = true;
2342 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2343 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2344 && !flag_incremental_link);
2345 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2346 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2347 }
2348 return false;
2349 }
2350
2351 /* Make cgraph node local. */
2352
2353 void
2354 cgraph_node::make_local (void)
2355 {
2356 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2357 }
2358
2359 /* Worker to set nothrow flag. */
2360
2361 static bool
2362 cgraph_set_nothrow_flag_1 (cgraph_node *node, void *data)
2363 {
2364 cgraph_edge *e;
2365
2366 TREE_NOTHROW (node->decl) = data != NULL;
2367
2368 if (data != NULL)
2369 for (e = node->callers; e; e = e->next_caller)
2370 e->can_throw_external = false;
2371 return false;
2372 }
2373
2374 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE,
2375 if any, to NOTHROW. */
2376
2377 void
2378 cgraph_node::set_nothrow_flag (bool nothrow)
2379 {
2380 call_for_symbol_thunks_and_aliases (cgraph_set_nothrow_flag_1,
2381 (void *)(size_t)nothrow, nothrow == true);
2382 }
2383
2384 /* Worker to set_const_flag. */
2385
2386 static void
2387 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2388 bool *changed)
2389 {
2390 /* Static constructors and destructors without a side effect can be
2391 optimized out. */
2392 if (set_const && !looping)
2393 {
2394 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2395 {
2396 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2397 *changed = true;
2398 }
2399 if (DECL_STATIC_DESTRUCTOR (node->decl))
2400 {
2401 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2402 *changed = true;
2403 }
2404 }
2405 if (!set_const)
2406 {
2407 if (TREE_READONLY (node->decl))
2408 {
2409 TREE_READONLY (node->decl) = 0;
2410 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2411 *changed = true;
2412 }
2413 }
2414 else
2415 {
2416 /* Consider function:
2417
2418 bool a(int *p)
2419 {
2420 return *p==*p;
2421 }
2422
2423 During early optimization we will turn this into:
2424
2425 bool a(int *p)
2426 {
2427 return true;
2428 }
2429
2430 Now this function will be detected as CONST; however, when interposed
2431 it may end up being just pure. We must always assume the worst
2432 scenario here. */
2433 if (TREE_READONLY (node->decl))
2434 {
2435 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2436 {
2437 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2438 *changed = true;
2439 }
2440 }
2441 else if (node->binds_to_current_def_p ())
2442 {
2443 TREE_READONLY (node->decl) = true;
2444 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2445 DECL_PURE_P (node->decl) = false;
2446 *changed = true;
2447 }
2448 else
2449 {
2450 if (dump_file && (dump_flags & TDF_DETAILS))
2451 fprintf (dump_file, "Dropping state to PURE because function does "
2452 "not bind to current def.\n");
2453 if (!DECL_PURE_P (node->decl))
2454 {
2455 DECL_PURE_P (node->decl) = true;
2456 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2457 *changed = true;
2458 }
2459 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2460 {
2461 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2462 *changed = true;
2463 }
2464 }
2465 }
2466
2467 ipa_ref *ref;
2468 FOR_EACH_ALIAS (node, ref)
2469 {
2470 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2471 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2472 set_const_flag_1 (alias, set_const, looping, changed);
2473 }
2474 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2475 if (e->caller->thunk.thunk_p
2476 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2477 {
2478 /* Virtual thunks access virtual offset in the vtable, so they can
2479 only be pure, never const. */
2480 if (set_const
2481 && (e->caller->thunk.virtual_offset_p
2482 || !node->binds_to_current_def_p (e->caller)))
2483 *changed |= e->caller->set_pure_flag (true, looping);
2484 else
2485 set_const_flag_1 (e->caller, set_const, looping, changed);
2486 }
2487 }
2488
2489 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2490 If SET_CONST is false, clear the flag.
2491 
2492 When setting the flag, be careful about possible interposition: do not
2493 set the flag for functions that can be interposed, and set only the pure
2494 flag for functions that may bind to a different definition.
2495
2496 Return true if any change was done. */
2497
2498 bool
2499 cgraph_node::set_const_flag (bool set_const, bool looping)
2500 {
2501 bool changed = false;
2502 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2503 set_const_flag_1 (this, set_const, looping, &changed);
2504 else
2505 {
2506 ipa_ref *ref;
2507
2508 FOR_EACH_ALIAS (this, ref)
2509 {
2510 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2511 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2512 set_const_flag_1 (alias, set_const, looping, &changed);
2513 }
2514 }
2515 return changed;
2516 }
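
/* Editor's illustrative sketch, not part of the original file: a typical
   caller is an IPA analysis that has proven the function free of side
   effects.  Note that set_const_flag itself may degrade the request to
   DECL_PURE_P when the symbol does not bind to the current definition, as
   implemented above.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_promote_to_const (cgraph_node *node, bool loops_forever)
{
  /* The return value says whether any flag actually changed.  */
  return node->set_const_flag (true, loops_forever);
}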
2517
2518 /* Info used by set_pure_flag_1. */
2519
2520 struct
2521 set_pure_flag_info
2522 {
2523 bool pure;
2524 bool looping;
2525 bool changed;
2526 };
2527
2528 /* Worker to set_pure_flag. */
2529
2530 static bool
2531 set_pure_flag_1 (cgraph_node *node, void *data)
2532 {
2533 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2534 /* Static constructors and destructors without a side effect can be
2535 optimized out. */
2536 if (info->pure && !info->looping)
2537 {
2538 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2539 {
2540 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2541 info->changed = true;
2542 }
2543 if (DECL_STATIC_DESTRUCTOR (node->decl))
2544 {
2545 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2546 info->changed = true;
2547 }
2548 }
2549 if (info->pure)
2550 {
2551 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2552 {
2553 DECL_PURE_P (node->decl) = true;
2554 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2555 info->changed = true;
2556 }
2557 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2558 && !info->looping)
2559 {
2560 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2561 info->changed = true;
2562 }
2563 }
2564 else
2565 {
2566 if (DECL_PURE_P (node->decl))
2567 {
2568 DECL_PURE_P (node->decl) = false;
2569 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2570 info->changed = true;
2571 }
2572 }
2573 return false;
2574 }
2575
2576 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node,
2577 if any, to PURE.
2578
2579 When setting the flag, be careful about possible interposition.
2580 Return true if any change was done. */
2581
2582 bool
2583 cgraph_node::set_pure_flag (bool pure, bool looping)
2584 {
2585 struct set_pure_flag_info info = {pure, looping, false};
2586 if (!pure)
2587 looping = false;
2588 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2589 return info.changed;
2590 }
2591
2592 /* Return true when cgraph_node cannot return or throw and thus
2593 it is safe to ignore its side effects for IPA analysis. */
2594
2595 bool
2596 cgraph_node::cannot_return_p (void)
2597 {
2598 int flags = flags_from_decl_or_type (decl);
2599 if (!opt_for_fn (decl, flag_exceptions))
2600 return (flags & ECF_NORETURN) != 0;
2601 else
2602 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2603 == (ECF_NORETURN | ECF_NOTHROW));
2604 }
2605
2606 /* Return true when a call on this edge cannot lead to a return from the
2607 caller and thus it is safe to ignore its side effects for IPA analysis
2608 when computing side effects of the caller.
2609 FIXME: We could actually mark all edges that have no reaching
2610 path to the exit block or throw to get better results. */
2611 bool
2612 cgraph_edge::cannot_lead_to_return_p (void)
2613 {
2614 if (caller->cannot_return_p ())
2615 return true;
2616 if (indirect_unknown_callee)
2617 {
2618 int flags = indirect_info->ecf_flags;
2619 if (!opt_for_fn (caller->decl, flag_exceptions))
2620 return (flags & ECF_NORETURN) != 0;
2621 else
2622 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2623 == (ECF_NORETURN | ECF_NOTHROW));
2624 }
2625 else
2626 return callee->cannot_return_p ();
2627 }
2628
2629 /* Return true if the call can be hot. */
2630
2631 bool
2632 cgraph_edge::maybe_hot_p (void)
2633 {
2634 /* TODO: Export profile_status from cfun->cfg to cgraph_node. */
2635 if (profile_info
2636 && opt_for_fn (caller->decl, flag_branch_probabilities)
2637 && !maybe_hot_count_p (NULL, count))
2638 return false;
2639 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2640 || (callee
2641 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2642 return false;
2643 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2644 && (callee
2645 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2646 return false;
2647 if (opt_for_fn (caller->decl, optimize_size))
2648 return false;
2649 if (caller->frequency == NODE_FREQUENCY_HOT)
2650 return true;
2651 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2652 && frequency < CGRAPH_FREQ_BASE * 3 / 2)
2653 return false;
2654 if (opt_for_fn (caller->decl, flag_guess_branch_prob))
2655 {
2656 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2657 || frequency <= (CGRAPH_FREQ_BASE
2658 / PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
2659 return false;
2660 }
2661 return true;
2662 }
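
/* Editor's illustrative sketch, not part of the original file: heuristics
   such as the inliner commonly iterate the callee edge list and consult
   maybe_hot_p above.  The helper name is hypothetical.  */

static int ATTRIBUTE_UNUSED
example_count_hot_calls (cgraph_node *node)
{
  int hot = 0;
  for (cgraph_edge *e = node->callees; e; e = e->next_callee)
    if (e->maybe_hot_p ())
      hot++;
  return hot;
}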
2663
2664 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2665
2666 static bool
2667 nonremovable_p (cgraph_node *node, void *)
2668 {
2669 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2670 }
2671
2672 /* Return true if the whole comdat group can be removed when there are no
2673 direct calls to THIS. */
2674
2675 bool
2676 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2677 {
2678 struct ipa_ref *ref;
2679
2680 /* For local symbols or non-comdat group it is the same as
2681 can_remove_if_no_direct_calls_p. */
2682 if (!externally_visible || !same_comdat_group)
2683 {
2684 if (DECL_EXTERNAL (decl))
2685 return true;
2686 if (address_taken)
2687 return false;
2688 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2689 }
2690
2691 if (will_inline && address_taken)
2692 return false;
2693
2694 /* Otherwise check if we can remove the symbol itself and then verify
2695 that the only uses of the comdat group are direct calls to THIS
2696 or its aliases. */
2697 if (!can_remove_if_no_direct_calls_and_refs_p ())
2698 return false;
2699
2700 /* Check that all refs come from within the comdat group. */
2701 for (int i = 0; iterate_referring (i, ref); i++)
2702 if (ref->referring->get_comdat_group () != get_comdat_group ())
2703 return false;
2704
2705 struct cgraph_node *target = ultimate_alias_target ();
2706 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2707 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2708 {
2709 if (!externally_visible)
2710 continue;
2711 if (!next->alias
2712 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2713 return false;
2714
2715 /* If we see different symbol than THIS, be sure to check calls. */
2716 if (next->ultimate_alias_target () != target)
2717 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2718 if (e->caller->get_comdat_group () != get_comdat_group ()
2719 || will_inline)
2720 return false;
2721
2722 /* If function is not being inlined, we care only about
2723 references outside of the comdat group. */
2724 if (!will_inline)
2725 for (int i = 0; next->iterate_referring (i, ref); i++)
2726 if (ref->referring->get_comdat_group () != get_comdat_group ())
2727 return false;
2728 }
2729 return true;
2730 }
2731
2732 /* Return true when function cgraph_node can be expected to be removed
2733 from the program when direct calls in this compilation unit are removed.
2734
2735 As a special case COMDAT functions are
2736 cgraph_can_remove_if_no_direct_calls_p while they are not
2737 cgraph_only_called_directly_p (it is possible they are called from
2738 another unit).
2739
2740 This function behaves as cgraph_only_called_directly_p because eliminating
2741 all uses of a COMDAT function does not necessarily make it disappear from
2742 the program unless we are compiling the whole program or we do LTO. In this
2743 case we know we win since dynamic linking will not really discard the
2744 linkonce section. */
2745
2746 bool
2747 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2748 (bool will_inline)
2749 {
2750 gcc_assert (!global.inlined_to);
2751 if (DECL_EXTERNAL (decl))
2752 return true;
2753
2754 if (!in_lto_p && !flag_whole_program)
2755 {
2756 /* If the symbol is in comdat group, we need to verify that whole comdat
2757 group becomes unreachable. Technically we could skip references from
2758 within the group, too. */
2759 if (!only_called_directly_p ())
2760 return false;
2761 if (same_comdat_group && externally_visible)
2762 {
2763 struct cgraph_node *target = ultimate_alias_target ();
2764
2765 if (will_inline && address_taken)
2766 return true;
2767 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2768 next != this;
2769 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2770 {
2771 if (!externally_visible)
2772 continue;
2773 if (!next->alias
2774 && !next->only_called_directly_p ())
2775 return false;
2776
2777 /* If we see different symbol than THIS,
2778 be sure to check calls. */
2779 if (next->ultimate_alias_target () != target)
2780 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2781 if (e->caller->get_comdat_group () != get_comdat_group ()
2782 || will_inline)
2783 return false;
2784 }
2785 }
2786 return true;
2787 }
2788 else
2789 return can_remove_if_no_direct_calls_p (will_inline);
2790 }
2791
2792
2793 /* Worker for cgraph_only_called_directly_p. */
2794
2795 static bool
2796 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2797 {
2798 return !node->only_called_directly_or_aliased_p ();
2799 }
2800
2801 /* Return true when function cgraph_node and all its aliases are only called
2802 directly,
2803 i.e. it is not externally visible, its address was not taken and
2804 it is not used in any other non-standard way. */
2805
2806 bool
2807 cgraph_node::only_called_directly_p (void)
2808 {
2809 gcc_assert (ultimate_alias_target () == this);
2810 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2811 NULL, true);
2812 }
2813
2814
2815 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2816
2817 static bool
2818 collect_callers_of_node_1 (cgraph_node *node, void *data)
2819 {
2820 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2821 cgraph_edge *cs;
2822 enum availability avail;
2823 node->ultimate_alias_target (&avail);
2824
2825 if (avail > AVAIL_INTERPOSABLE)
2826 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2827 if (!cs->indirect_inlining_edge
2828 && !cs->caller->thunk.thunk_p)
2829 redirect_callers->safe_push (cs);
2830 return false;
2831 }
2832
2833 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2834 cgraph_node (i.e. are not overwritable). */
2835
2836 vec<cgraph_edge *>
2837 cgraph_node::collect_callers (void)
2838 {
2839 vec<cgraph_edge *> redirect_callers = vNULL;
2840 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2841 &redirect_callers, false);
2842 return redirect_callers;
2843 }
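
/* Editor's illustrative sketch, not part of the original file: the vector
   returned by collect_callers is heap allocated and must be released by the
   caller.  The helper name is hypothetical.  */

static unsigned ATTRIBUTE_UNUSED
example_count_redirectable_callers (cgraph_node *node)
{
  vec<cgraph_edge *> callers = node->collect_callers ();
  unsigned count = callers.length ();
  callers.release ();
  return count;
}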
2844
2845 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
2846
2847 static bool
2848 clone_of_p (cgraph_node *node, cgraph_node *node2)
2849 {
2850 bool skipped_thunk = false;
2851 node = node->ultimate_alias_target ();
2852 node2 = node2->ultimate_alias_target ();
2853
2854 /* There are no virtual clones of thunks, so check former_clone_of or if we
2855 might have skipped thunks because these adjustments are no longer
2856 necessary. */
2857 while (node->thunk.thunk_p)
2858 {
2859 if (node2->former_clone_of == node->decl)
2860 return true;
2861 if (!node->thunk.this_adjusting)
2862 return false;
2863 node = node->callees->callee->ultimate_alias_target ();
2864 skipped_thunk = true;
2865 }
2866
2867 if (skipped_thunk)
2868 {
2869 if (!node2->clone.args_to_skip
2870 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
2871 return false;
2872 if (node2->former_clone_of == node->decl)
2873 return true;
2874 else if (!node2->clone_of)
2875 return false;
2876 }
2877
2878 while (node != node2 && node2)
2879 node2 = node2->clone_of;
2880 return node2 != NULL;
2881 }
2882
2883 /* Verify edge count and frequency. */
2884
2885 bool
2886 cgraph_edge::verify_count_and_frequency ()
2887 {
2888 bool error_found = false;
2889 if (count < 0)
2890 {
2891 error ("caller edge count is negative");
2892 error_found = true;
2893 }
2894 if (frequency < 0)
2895 {
2896 error ("caller edge frequency is negative");
2897 error_found = true;
2898 }
2899 if (frequency > CGRAPH_FREQ_MAX)
2900 {
2901 error ("caller edge frequency is too large");
2902 error_found = true;
2903 }
2904 return error_found;
2905 }
2906
2907 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2908 static void
2909 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
2910 {
2911 bool fndecl_was_null = false;
2912 /* debug_gimple_stmt needs correct cfun */
2913 if (cfun != this_cfun)
2914 set_cfun (this_cfun);
2915 /* ...and an actual current_function_decl */
2916 if (!current_function_decl)
2917 {
2918 current_function_decl = this_cfun->decl;
2919 fndecl_was_null = true;
2920 }
2921 debug_gimple_stmt (stmt);
2922 if (fndecl_was_null)
2923 current_function_decl = NULL;
2924 }
2925
2926 /* Verify that call graph edge corresponds to DECL from the associated
2927 statement. Return true if the verification should fail. */
2928
2929 bool
2930 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
2931 {
2932 cgraph_node *node;
2933
2934 if (!decl || callee->global.inlined_to)
2935 return false;
2936 if (symtab->state == LTO_STREAMING)
2937 return false;
2938 node = cgraph_node::get (decl);
2939
2940 /* We do not know if a node from a different partition is an alias or what it
2941 aliases and therefore cannot do the former_clone_of check reliably. When
2942 body_removed is set, we have lost all information about what it was an alias
2943 or thunk of and also cannot proceed. */
2944 if (!node
2945 || node->body_removed
2946 || node->in_other_partition
2947 || callee->icf_merged
2948 || callee->in_other_partition)
2949 return false;
2950
2951 node = node->ultimate_alias_target ();
2952
2953 /* Optimizers can redirect unreachable calls or calls triggering undefined
2954 behavior to builtin_unreachable. */
2955 if (DECL_BUILT_IN_CLASS (callee->decl) == BUILT_IN_NORMAL
2956 && DECL_FUNCTION_CODE (callee->decl) == BUILT_IN_UNREACHABLE)
2957 return false;
2958
2959 if (callee->former_clone_of != node->decl
2960 && (node != callee->ultimate_alias_target ())
2961 && !clone_of_p (node, callee))
2962 return true;
2963 else
2964 return false;
2965 }
2966
2967 /* Verify consistency of the given cgraph node. */
2968 DEBUG_FUNCTION void
2969 cgraph_node::verify_node (void)
2970 {
2971 cgraph_edge *e;
2972 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
2973 basic_block this_block;
2974 gimple_stmt_iterator gsi;
2975 bool error_found = false;
2976
2977 if (seen_error ())
2978 return;
2979
2980 timevar_push (TV_CGRAPH_VERIFY);
2981 error_found |= verify_base ();
2982 for (e = callees; e; e = e->next_callee)
2983 if (e->aux)
2984 {
2985 error ("aux field set for edge %s->%s",
2986 identifier_to_locale (e->caller->name ()),
2987 identifier_to_locale (e->callee->name ()));
2988 error_found = true;
2989 }
2990 if (count < 0)
2991 {
2992 error ("execution count is negative");
2993 error_found = true;
2994 }
2995 if (global.inlined_to && same_comdat_group)
2996 {
2997 error ("inline clone in same comdat group list");
2998 error_found = true;
2999 }
3000 if (!definition && !in_other_partition && local.local)
3001 {
3002 error ("local symbols must be defined");
3003 error_found = true;
3004 }
3005 if (global.inlined_to && externally_visible)
3006 {
3007 error ("externally visible inline clone");
3008 error_found = true;
3009 }
3010 if (global.inlined_to && address_taken)
3011 {
3012 error ("inline clone with address taken");
3013 error_found = true;
3014 }
3015 if (global.inlined_to && force_output)
3016 {
3017 error ("inline clone is forced to output");
3018 error_found = true;
3019 }
3020 for (e = indirect_calls; e; e = e->next_callee)
3021 {
3022 if (e->aux)
3023 {
3024 error ("aux field set for indirect edge from %s",
3025 identifier_to_locale (e->caller->name ()));
3026 error_found = true;
3027 }
3028 if (!e->indirect_unknown_callee
3029 || !e->indirect_info)
3030 {
3031 error ("An indirect edge from %s is not marked as indirect or has "
3032 "associated indirect_info, the corresponding statement is: ",
3033 identifier_to_locale (e->caller->name ()));
3034 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3035 error_found = true;
3036 }
3037 }
3038 bool check_comdat = comdat_local_p ();
3039 for (e = callers; e; e = e->next_caller)
3040 {
3041 if (e->verify_count_and_frequency ())
3042 error_found = true;
3043 if (check_comdat
3044 && !in_same_comdat_group_p (e->caller))
3045 {
3046 error ("comdat-local function called by %s outside its comdat",
3047 identifier_to_locale (e->caller->name ()));
3048 error_found = true;
3049 }
3050 if (!e->inline_failed)
3051 {
3052 if (global.inlined_to
3053 != (e->caller->global.inlined_to
3054 ? e->caller->global.inlined_to : e->caller))
3055 {
3056 error ("inlined_to pointer is wrong");
3057 error_found = true;
3058 }
3059 if (callers->next_caller)
3060 {
3061 error ("multiple inline callers");
3062 error_found = true;
3063 }
3064 }
3065 else
3066 if (global.inlined_to)
3067 {
3068 error ("inlined_to pointer set for noninline callers");
3069 error_found = true;
3070 }
3071 }
3072 for (e = callees; e; e = e->next_callee)
3073 {
3074 if (e->verify_count_and_frequency ())
3075 error_found = true;
3076 if (gimple_has_body_p (e->caller->decl)
3077 && !e->caller->global.inlined_to
3078 && !e->speculative
3079 /* Optimized out calls are redirected to __builtin_unreachable. */
3080 && (e->frequency
3081 || e->callee->decl
3082 != builtin_decl_implicit (BUILT_IN_UNREACHABLE))
3083 && (e->frequency
3084 != compute_call_stmt_bb_frequency (e->caller->decl,
3085 gimple_bb (e->call_stmt))))
3086 {
3087 error ("caller edge frequency %i does not match BB frequency %i",
3088 e->frequency,
3089 compute_call_stmt_bb_frequency (e->caller->decl,
3090 gimple_bb (e->call_stmt)));
3091 error_found = true;
3092 }
3093 }
3094 for (e = indirect_calls; e; e = e->next_callee)
3095 {
3096 if (e->verify_count_and_frequency ())
3097 error_found = true;
3098 if (gimple_has_body_p (e->caller->decl)
3099 && !e->caller->global.inlined_to
3100 && !e->speculative
3101 && (e->frequency
3102 != compute_call_stmt_bb_frequency (e->caller->decl,
3103 gimple_bb (e->call_stmt))))
3104 {
3105 error ("indirect call frequency %i does not match BB frequency %i",
3106 e->frequency,
3107 compute_call_stmt_bb_frequency (e->caller->decl,
3108 gimple_bb (e->call_stmt)));
3109 error_found = true;
3110 }
3111 }
3112 if (!callers && global.inlined_to)
3113 {
3114 error ("inlined_to pointer is set but no predecessors found");
3115 error_found = true;
3116 }
3117 if (global.inlined_to == this)
3118 {
3119 error ("inlined_to pointer refers to itself");
3120 error_found = true;
3121 }
3122
3123 if (clone_of)
3124 {
3125 cgraph_node *n;
3126 for (n = clone_of->clones; n; n = n->next_sibling_clone)
3127 if (n == this)
3128 break;
3129 if (!n)
3130 {
3131 error ("cgraph_node has wrong clone_of");
3132 error_found = true;
3133 }
3134 }
3135 if (clones)
3136 {
3137 cgraph_node *n;
3138 for (n = clones; n; n = n->next_sibling_clone)
3139 if (n->clone_of != this)
3140 break;
3141 if (n)
3142 {
3143 error ("cgraph_node has wrong clone list");
3144 error_found = true;
3145 }
3146 }
3147 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3148 {
3149 error ("cgraph_node is in clone list but it is not clone");
3150 error_found = true;
3151 }
3152 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3153 {
3154 error ("cgraph_node has wrong prev_clone pointer");
3155 error_found = true;
3156 }
3157 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3158 {
3159 error ("double linked list of clones corrupted");
3160 error_found = true;
3161 }
3162
3163 if (analyzed && alias)
3164 {
3165 bool ref_found = false;
3166 int i;
3167 ipa_ref *ref = NULL;
3168
3169 if (callees)
3170 {
3171 error ("Alias has call edges");
3172 error_found = true;
3173 }
3174 for (i = 0; iterate_reference (i, ref); i++)
3175 if (ref->use == IPA_REF_CHKP)
3176 ;
3177 else if (ref->use != IPA_REF_ALIAS)
3178 {
3179 error ("Alias has non-alias reference");
3180 error_found = true;
3181 }
3182 else if (ref_found)
3183 {
3184 error ("Alias has more than one alias reference");
3185 error_found = true;
3186 }
3187 else
3188 ref_found = true;
3189 if (!ref_found)
3190 {
3191 error ("Analyzed alias has no reference");
3192 error_found = true;
3193 }
3194 }
3195
3196 /* Check instrumented version reference. */
3197 if (instrumented_version
3198 && instrumented_version->instrumented_version != this)
3199 {
3200 error ("Instrumentation clone does not reference original node");
3201 error_found = true;
3202 }
3203
3204 /* Cannot have orig_decl for non-instrumented nodes. */
3205 if (!instrumentation_clone && orig_decl)
3206 {
3207 error ("Not instrumented node has non-NULL original declaration");
3208 error_found = true;
3209 }
3210
3211 /* If the original non-instrumented node still exists then we may check
3212 that the original declaration is set properly. */
3213 if (instrumented_version
3214 && orig_decl
3215 && orig_decl != instrumented_version->decl)
3216 {
3217 error ("Instrumented node has wrong original declaration");
3218 error_found = true;
3219 }
3220
3221 /* Check all nodes have chkp reference to their instrumented versions. */
3222 if (analyzed
3223 && instrumented_version
3224 && !instrumentation_clone)
3225 {
3226 bool ref_found = false;
3227 int i;
3228 struct ipa_ref *ref;
3229
3230 for (i = 0; iterate_reference (i, ref); i++)
3231 if (ref->use == IPA_REF_CHKP)
3232 {
3233 if (ref_found)
3234 {
3235 error ("Node has more than one chkp reference");
3236 error_found = true;
3237 }
3238 if (ref->referred != instrumented_version)
3239 {
3240 error ("Wrong node is referenced with chkp reference");
3241 error_found = true;
3242 }
3243 ref_found = true;
3244 }
3245
3246 if (!ref_found)
3247 {
3248 error ("Analyzed node has no reference to instrumented version");
3249 error_found = true;
3250 }
3251 }
3252
3253 if (instrumentation_clone
3254 && DECL_BUILT_IN_CLASS (decl) == NOT_BUILT_IN)
3255 {
3256 tree name = DECL_ASSEMBLER_NAME (decl);
3257 tree orig_name = DECL_ASSEMBLER_NAME (orig_decl);
3258
3259 if (!IDENTIFIER_TRANSPARENT_ALIAS (name)
3260 || TREE_CHAIN (name) != orig_name)
3261 {
3262 error ("Alias chain for instrumented node is broken");
3263 error_found = true;
3264 }
3265 }
3266
3267 if (analyzed && thunk.thunk_p)
3268 {
3269 if (!callees)
3270 {
3271 error ("No edge out of thunk node");
3272 error_found = true;
3273 }
3274 else if (callees->next_callee)
3275 {
3276 error ("More than one edge out of thunk node");
3277 error_found = true;
3278 }
3279 if (gimple_has_body_p (decl))
3280 {
3281 error ("Thunk is not supposed to have body");
3282 error_found = true;
3283 }
3284 if (thunk.add_pointer_bounds_args
3285 && !instrumented_version->semantically_equivalent_p (callees->callee))
3286 {
3287 error ("Instrumentation thunk has wrong edge callee");
3288 error_found = true;
3289 }
3290 }
3291 else if (analyzed && gimple_has_body_p (decl)
3292 && !TREE_ASM_WRITTEN (decl)
3293 && (!DECL_EXTERNAL (decl) || global.inlined_to)
3294 && !flag_wpa)
3295 {
3296 if (this_cfun->cfg)
3297 {
3298 hash_set<gimple *> stmts;
3299 int i;
3300 ipa_ref *ref = NULL;
3301
3302 /* Reach the trees by walking over the CFG, and note the
3303 enclosing basic-blocks in the call edges. */
3304 FOR_EACH_BB_FN (this_block, this_cfun)
3305 {
3306 for (gsi = gsi_start_phis (this_block);
3307 !gsi_end_p (gsi); gsi_next (&gsi))
3308 stmts.add (gsi_stmt (gsi));
3309 for (gsi = gsi_start_bb (this_block);
3310 !gsi_end_p (gsi);
3311 gsi_next (&gsi))
3312 {
3313 gimple *stmt = gsi_stmt (gsi);
3314 stmts.add (stmt);
3315 if (is_gimple_call (stmt))
3316 {
3317 cgraph_edge *e = get_edge (stmt);
3318 tree decl = gimple_call_fndecl (stmt);
3319 if (e)
3320 {
3321 if (e->aux)
3322 {
3323 error ("shared call_stmt:");
3324 cgraph_debug_gimple_stmt (this_cfun, stmt);
3325 error_found = true;
3326 }
3327 if (!e->indirect_unknown_callee)
3328 {
3329 if (e->verify_corresponds_to_fndecl (decl))
3330 {
3331 error ("edge points to wrong declaration:");
3332 debug_tree (e->callee->decl);
3333 fprintf (stderr," Instead of:");
3334 debug_tree (decl);
3335 error_found = true;
3336 }
3337 }
3338 else if (decl)
3339 {
3340 error ("an indirect edge with unknown callee "
3341 "corresponding to a call_stmt with "
3342 "a known declaration:");
3343 error_found = true;
3344 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3345 }
3346 e->aux = (void *)1;
3347 }
3348 else if (decl)
3349 {
3350 error ("missing callgraph edge for call stmt:");
3351 cgraph_debug_gimple_stmt (this_cfun, stmt);
3352 error_found = true;
3353 }
3354 }
3355 }
3356 }
3357 for (i = 0; iterate_reference (i, ref); i++)
3358 if (ref->stmt && !stmts.contains (ref->stmt))
3359 {
3360 error ("reference to dead statement");
3361 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3362 error_found = true;
3363 }
3364 }
3365 else
3366 /* No CFG available?! */
3367 gcc_unreachable ();
3368
3369 for (e = callees; e; e = e->next_callee)
3370 {
3371 if (!e->aux)
3372 {
3373 error ("edge %s->%s has no corresponding call_stmt",
3374 identifier_to_locale (e->caller->name ()),
3375 identifier_to_locale (e->callee->name ()));
3376 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3377 error_found = true;
3378 }
3379 e->aux = 0;
3380 }
3381 for (e = indirect_calls; e; e = e->next_callee)
3382 {
3383 if (!e->aux && !e->speculative)
3384 {
3385 error ("an indirect edge from %s has no corresponding call_stmt",
3386 identifier_to_locale (e->caller->name ()));
3387 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3388 error_found = true;
3389 }
3390 e->aux = 0;
3391 }
3392 }
3393 if (error_found)
3394 {
3395 dump (stderr);
3396 internal_error ("verify_cgraph_node failed");
3397 }
3398 timevar_pop (TV_CGRAPH_VERIFY);
3399 }
3400
3401 /* Verify whole cgraph structure. */
3402 DEBUG_FUNCTION void
3403 cgraph_node::verify_cgraph_nodes (void)
3404 {
3405 cgraph_node *node;
3406
3407 if (seen_error ())
3408 return;
3409
3410 FOR_EACH_FUNCTION (node)
3411 node->verify ();
3412 }
3413
3414 /* Walk the alias chain to return the function cgraph_node is an alias of.
3415 Walk through thunks, too.
3416 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3417 When REF is non-NULL, assume that reference happens in symbol REF
3418 when determining the availability. */
3419
3420 cgraph_node *
3421 cgraph_node::function_symbol (enum availability *availability,
3422 struct symtab_node *ref)
3423 {
3424 cgraph_node *node = ultimate_alias_target (availability, ref);
3425
3426 while (node->thunk.thunk_p)
3427 {
3428 ref = node;
3429 node = node->callees->callee;
3430 if (availability)
3431 {
3432 enum availability a;
3433 a = node->get_availability (ref);
3434 if (a < *availability)
3435 *availability = a;
3436 }
3437 node = node->ultimate_alias_target (availability, ref);
3438 }
3439 return node;
3440 }
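
/* Editor's illustrative sketch, not part of the original file: shows the
   usual pattern of resolving a call edge through aliases and thunks and then
   checking the accumulated availability.  The helper name is hypothetical.  */

static cgraph_node * ATTRIBUTE_UNUSED
example_resolve_callee (cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *impl = e->callee->function_symbol (&avail);
  /* Only trust the resolved body when no symbol in the chain can be
     interposed.  */
  return avail > AVAIL_INTERPOSABLE ? impl : NULL;
}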
3441
3442 /* Walk the alias chain to return the function cgraph_node is an alias of.
3443 Walk through non virtual thunks, too. Thus we return either a function
3444 or a virtual thunk node.
3445 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3446 When REF is non-NULL, assume that reference happens in symbol REF
3447 when determining the availability. */
3448
3449 cgraph_node *
3450 cgraph_node::function_or_virtual_thunk_symbol
3451 (enum availability *availability,
3452 struct symtab_node *ref)
3453 {
3454 cgraph_node *node = ultimate_alias_target (availability, ref);
3455
3456 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3457 {
3458 ref = node;
3459 node = node->callees->callee;
3460 if (availability)
3461 {
3462 enum availability a;
3463 a = node->get_availability (ref);
3464 if (a < *availability)
3465 *availability = a;
3466 }
3467 node = node->ultimate_alias_target (availability, ref);
3468 }
3469 return node;
3470 }
3471
3472 /* When doing LTO, read cgraph_node's body from disk if it is not already
3473 present. */
3474
3475 bool
3476 cgraph_node::get_untransformed_body (void)
3477 {
3478 lto_file_decl_data *file_data;
3479 const char *data, *name;
3480 size_t len;
3481 tree decl = this->decl;
3482
3483 /* Check if body is already there. Either we have gimple body or
3484 the function is a thunk and in that case we set DECL_ARGUMENTS. */
3485 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3486 return false;
3487
3488 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3489
3490 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3491
3492 file_data = lto_file_data;
3493 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3494
3495 /* We may have renamed the declaration, e.g., a static function. */
3496 name = lto_get_decl_name_mapping (file_data, name);
3497 struct lto_in_decl_state *decl_state
3498 = lto_get_function_in_decl_state (file_data, decl);
3499
3500 data = lto_get_section_data (file_data, LTO_section_function_body,
3501 name, &len, decl_state->compressed);
3502 if (!data)
3503 fatal_error (input_location, "%s: section %s is missing",
3504 file_data->file_name,
3505 name);
3506
3507 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3508
3509 lto_input_function_body (file_data, this, data);
3510 lto_stats.num_function_bodies++;
3511 lto_free_section_data (file_data, LTO_section_function_body, name,
3512 data, len, decl_state->compressed);
3513 lto_free_function_in_decl_state_for_node (this);
3514 /* Keep lto file data so ipa-inline-analysis knows about cross module
3515 inlining. */
3516
3517 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3518
3519 return true;
3520 }
3521
3522 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3523 if it is not already present. When some IPA transformations are scheduled,
3524 apply them. */
3525
3526 bool
3527 cgraph_node::get_body (void)
3528 {
3529 bool updated;
3530
3531 updated = get_untransformed_body ();
3532
3533 /* Getting transformed body makes no sense for inline clones;
3534 we should never use this on real clones because they are materialized
3535 early.
3536 TODO: Materializing clones here will likely lead to smaller LTRANS
3537 footprint. */
3538 gcc_assert (!global.inlined_to && !clone_of);
3539 if (ipa_transforms_to_apply.exists ())
3540 {
3541 opt_pass *saved_current_pass = current_pass;
3542 FILE *saved_dump_file = dump_file;
3543 const char *saved_dump_file_name = dump_file_name;
3544 int saved_dump_flags = dump_flags;
3545 dump_file_name = NULL;
3546 dump_file = NULL;
3547
3548 push_cfun (DECL_STRUCT_FUNCTION (decl));
3549 execute_all_ipa_transforms ();
3550 cgraph_edge::rebuild_edges ();
3551 free_dominance_info (CDI_DOMINATORS);
3552 free_dominance_info (CDI_POST_DOMINATORS);
3553 pop_cfun ();
3554 updated = true;
3555
3556 current_pass = saved_current_pass;
3557 dump_file = saved_dump_file;
3558 dump_file_name = saved_dump_file_name;
3559 dump_flags = saved_dump_flags;
3560 }
3561 return updated;
3562 }
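
/* Editor's illustrative sketch, not part of the original file: late IPA
   passes typically call get_body before touching a function and wrap the
   work in push_cfun/pop_cfun, mirroring what get_body itself does for the
   IPA transforms.  The helper name is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_with_function_body (cgraph_node *node)
{
  /* Reads the gimple body from the LTO file if needed and applies any
     pending IPA transformations.  */
  node->get_body ();
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  /* ... examine or modify the body here ...  */
  pop_cfun ();
}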
3563
3564 /* Return the DECL_STRUCT_FUNCTION of the function. */
3565
3566 struct function *
3567 cgraph_node::get_fun (void)
3568 {
3569 cgraph_node *node = this;
3570 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3571
3572 while (!fun && node->clone_of)
3573 {
3574 node = node->clone_of;
3575 fun = DECL_STRUCT_FUNCTION (node->decl);
3576 }
3577
3578 return fun;
3579 }
3580
3581 /* Verify if the type of the argument matches that of the function
3582 declaration. If we cannot verify this or there is a mismatch,
3583 return false. */
3584
3585 static bool
3586 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3587 {
3588 tree parms, p;
3589 unsigned int i, nargs;
3590
3591 /* Calls to internal functions always match their signature. */
3592 if (gimple_call_internal_p (stmt))
3593 return true;
3594
3595 nargs = gimple_call_num_args (stmt);
3596
3597 /* Get argument types for verification. */
3598 if (fndecl)
3599 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3600 else
3601 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3602
3603 /* Verify if the type of the argument matches that of the function
3604 declaration. If we cannot verify this or there is a mismatch,
3605 return false. */
3606 if (fndecl && DECL_ARGUMENTS (fndecl))
3607 {
3608 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3609 i < nargs;
3610 i++, p = DECL_CHAIN (p))
3611 {
3612 tree arg;
3613 /* We cannot distinguish a varargs function from the case
3614 of excess parameters; still, deferring the inlining decision
3615 to the callee is possible. */
3616 if (!p)
3617 break;
3618 arg = gimple_call_arg (stmt, i);
3619 if (p == error_mark_node
3620 || DECL_ARG_TYPE (p) == error_mark_node
3621 || arg == error_mark_node
3622 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3623 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3624 return false;
3625 }
3626 if (args_count_match && p)
3627 return false;
3628 }
3629 else if (parms)
3630 {
3631 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3632 {
3633 tree arg;
3634 /* If this is a varargs function, defer the inlining decision
3635 to the callee. */
3636 if (!p)
3637 break;
3638 arg = gimple_call_arg (stmt, i);
3639 if (TREE_VALUE (p) == error_mark_node
3640 || arg == error_mark_node
3641 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3642 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3643 && !fold_convertible_p (TREE_VALUE (p), arg)))
3644 return false;
3645 }
3646 }
3647 else
3648 {
3649 if (nargs != 0)
3650 return false;
3651 }
3652 return true;
3653 }
3654
3655 /* Verify if the type of the argument and lhs of CALL_STMT matches
3656 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3657 true, the arg count needs to be the same.
3658 If we cannot verify this or there is a mismatch, return false. */
3659
3660 bool
3661 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3662 bool args_count_match)
3663 {
3664 tree lhs;
3665
3666 if ((DECL_RESULT (callee)
3667 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3668 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3669 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3670 TREE_TYPE (lhs))
3671 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3672 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3673 return false;
3674 return true;
3675 }
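
/* Editor's illustrative sketch, not part of the original file: callers that
   want to turn an indirect call into a direct one are expected to run this
   type check first and keep the call indirect on mismatch.  The helper name
   is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_can_make_direct (gimple *call_stmt, tree new_callee)
{
  /* Require only assignment-compatible types, not an exact argument
     count, hence ARGS_COUNT_MATCH == false.  */
  return gimple_check_call_matching_types (call_stmt, new_callee, false);
}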
3676
3677 /* Reset all state within cgraph.c so that we can rerun the compiler
3678 within the same process. For use by toplev::finalize. */
3679
3680 void
3681 cgraph_c_finalize (void)
3682 {
3683 symtab = NULL;
3684
3685 x_cgraph_nodes_queue = NULL;
3686
3687 cgraph_fnver_htab = NULL;
3688 version_info_node = NULL;
3689 }
3690
3691 /* A worker for call_for_symbol_and_aliases. */
3692
3693 bool
3694 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3695 void *),
3696 void *data,
3697 bool include_overwritable)
3698 {
3699 ipa_ref *ref;
3700 FOR_EACH_ALIAS (this, ref)
3701 {
3702 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3703 if (include_overwritable
3704 || alias->get_availability () > AVAIL_INTERPOSABLE)
3705 if (alias->call_for_symbol_and_aliases (callback, data,
3706 include_overwritable))
3707 return true;
3708 }
3709 return false;
3710 }
3711
3712 /* Return true if NODE has a thunk. */
3713
3714 bool
3715 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3716 {
3717 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3718 if (e->caller->thunk.thunk_p)
3719 return true;
3720 return false;
3721 }
3722
3723 #include "gt-cgraph.h"