gcc/cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22 
23 The call graph is a data structure designed for inter-procedural optimization.
24 It represents a multi-graph where nodes are functions and edges are call sites. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "varasm.h"
32 #include "calls.h"
33 #include "print-tree.h"
34 #include "tree-inline.h"
35 #include "langhooks.h"
36 #include "hashtab.h"
37 #include "toplev.h"
38 #include "flags.h"
39 #include "debug.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "timevar.h"
50 #include "dumpfile.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssa.h"
55 #include "value-prof.h"
56 #include "except.h"
57 #include "diagnostic-core.h"
58 #include "rtl.h"
59 #include "ipa-utils.h"
60 #include "lto-streamer.h"
61 #include "ipa-inline.h"
62 #include "cfgloop.h"
63 #include "gimple-pretty-print.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 static void cgraph_node_remove_callers (struct cgraph_node *node);
71 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
72 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
73
74 /* Queue of cgraph nodes scheduled to be lowered. */
75 symtab_node *x_cgraph_nodes_queue;
76 #define cgraph_nodes_queue ((struct cgraph_node *)x_cgraph_nodes_queue)
77
78 /* Number of nodes in existence. */
79 int cgraph_n_nodes;
80
81 /* Maximal uid used in cgraph nodes. */
82 int cgraph_max_uid;
83
84 /* Maximal uid used in cgraph edges. */
85 int cgraph_edge_max_uid;
86
87 /* Set when whole unit has been analyzed so we can access global info. */
88 bool cgraph_global_info_ready = false;
89
90 /* What state callgraph is in right now. */
91 enum cgraph_state cgraph_state = CGRAPH_STATE_PARSING;
92
94 /* Set when the cgraph is fully built and the basic flags are computed. */
94 bool cgraph_function_flags_ready = false;
95
96 /* List of hooks triggered on cgraph_edge events. */
97 struct cgraph_edge_hook_list {
98 cgraph_edge_hook hook;
99 void *data;
100 struct cgraph_edge_hook_list *next;
101 };
102
103 /* List of hooks triggered on cgraph_node events. */
104 struct cgraph_node_hook_list {
105 cgraph_node_hook hook;
106 void *data;
107 struct cgraph_node_hook_list *next;
108 };
109
110 /* List of hooks triggered on events involving two cgraph_edges. */
111 struct cgraph_2edge_hook_list {
112 cgraph_2edge_hook hook;
113 void *data;
114 struct cgraph_2edge_hook_list *next;
115 };
116
117 /* List of hooks triggered on events involving two cgraph_nodes. */
118 struct cgraph_2node_hook_list {
119 cgraph_2node_hook hook;
120 void *data;
121 struct cgraph_2node_hook_list *next;
122 };
123
124 /* List of hooks triggered when an edge is removed. */
125 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
126 /* List of hooks triggered when a node is removed. */
127 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
128 /* List of hooks triggered when an edge is duplicated. */
129 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
130 /* List of hooks triggered when a node is duplicated. */
131 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
133 /* List of hooks triggered when a function is inserted. */
133 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
134
135 /* Head of a linked list of unused (freed) call graph nodes.
136 Do not GTY((delete)) this list so UIDs get reliably recycled. */
137 static GTY(()) struct cgraph_node *free_nodes;
138 /* Head of a linked list of unused (freed) call graph edges.
139 Do not GTY((delete)) this list so UIDs get reliably recycled. */
140 static GTY(()) struct cgraph_edge *free_edges;
141
142 /* Did process_same_body_aliases run? */
143 bool cpp_implicit_aliases_done;
144
145 /* Map a cgraph_node to cgraph_function_version_info using this htab.
146 The cgraph_function_version_info has a THIS_NODE field that is the
147 corresponding cgraph_node. */
148
149 static GTY((param_is (struct cgraph_function_version_info))) htab_t
150 cgraph_fnver_htab = NULL;
151
152 /* Hash function for cgraph_fnver_htab. */
153 static hashval_t
154 cgraph_fnver_htab_hash (const void *ptr)
155 {
156 int uid = ((const struct cgraph_function_version_info *)ptr)->this_node->uid;
157 return (hashval_t)(uid);
158 }
159
160 /* eq function for cgraph_fnver_htab. */
161 static int
162 cgraph_fnver_htab_eq (const void *p1, const void *p2)
163 {
164 const struct cgraph_function_version_info *n1
165 = (const struct cgraph_function_version_info *)p1;
166 const struct cgraph_function_version_info *n2
167 = (const struct cgraph_function_version_info *)p2;
168
169 return n1->this_node->uid == n2->this_node->uid;
170 }
171
172 /* GC root that keeps the most recently allocated version info node alive. */
173 static GTY(()) struct cgraph_function_version_info *
174 version_info_node = NULL;
175
176 /* Get the cgraph_function_version_info node corresponding to node. */
177 struct cgraph_function_version_info *
178 get_cgraph_node_version (struct cgraph_node *node)
179 {
180 struct cgraph_function_version_info *ret;
181 struct cgraph_function_version_info key;
182 key.this_node = node;
183
184 if (cgraph_fnver_htab == NULL)
185 return NULL;
186
187 ret = (struct cgraph_function_version_info *)
188 htab_find (cgraph_fnver_htab, &key);
189
190 return ret;
191 }
192
193 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
194 corresponding to cgraph_node NODE. */
195 struct cgraph_function_version_info *
196 insert_new_cgraph_node_version (struct cgraph_node *node)
197 {
198 void **slot;
199
200 version_info_node = NULL;
201 version_info_node = ggc_alloc_cleared_cgraph_function_version_info ();
202 version_info_node->this_node = node;
203
204 if (cgraph_fnver_htab == NULL)
205 cgraph_fnver_htab = htab_create_ggc (2, cgraph_fnver_htab_hash,
206 cgraph_fnver_htab_eq, NULL);
207
208 slot = htab_find_slot (cgraph_fnver_htab, version_info_node, INSERT);
209 gcc_assert (slot != NULL);
210 *slot = version_info_node;
211 return version_info_node;
212 }
213
214 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
215 DECL is a duplicate declaration. */
216 void
217 delete_function_version (tree decl)
218 {
219 struct cgraph_node *decl_node = cgraph_get_node (decl);
220 struct cgraph_function_version_info *decl_v = NULL;
221
222 if (decl_node == NULL)
223 return;
224
225 decl_v = get_cgraph_node_version (decl_node);
226
227 if (decl_v == NULL)
228 return;
229
230 if (decl_v->prev != NULL)
231 decl_v->prev->next = decl_v->next;
232
233 if (decl_v->next != NULL)
234 decl_v->next->prev = decl_v->prev;
235
236 if (cgraph_fnver_htab != NULL)
237 htab_remove_elt (cgraph_fnver_htab, decl_v);
238
239 cgraph_remove_node (decl_node);
240 }
241
242 /* Record that DECL1 and DECL2 are semantically identical function
243 versions. */
244 void
245 record_function_versions (tree decl1, tree decl2)
246 {
247 struct cgraph_node *decl1_node = cgraph_get_create_node (decl1);
248 struct cgraph_node *decl2_node = cgraph_get_create_node (decl2);
249 struct cgraph_function_version_info *decl1_v = NULL;
250 struct cgraph_function_version_info *decl2_v = NULL;
251 struct cgraph_function_version_info *before;
252 struct cgraph_function_version_info *after;
253
254 gcc_assert (decl1_node != NULL && decl2_node != NULL);
255 decl1_v = get_cgraph_node_version (decl1_node);
256 decl2_v = get_cgraph_node_version (decl2_node);
257
258 if (decl1_v != NULL && decl2_v != NULL)
259 return;
260
261 if (decl1_v == NULL)
262 decl1_v = insert_new_cgraph_node_version (decl1_node);
263
264 if (decl2_v == NULL)
265 decl2_v = insert_new_cgraph_node_version (decl2_node);
266
267 /* Chain decl2_v and decl1_v. All semantically identical versions
268 will be chained together. */
269
270 before = decl1_v;
271 after = decl2_v;
272
273 while (before->next != NULL)
274 before = before->next;
275
276 while (after->prev != NULL)
277 after = after->prev;
278
279 before->next = after;
280 after->prev = before;
281 }
282
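/* Illustrative sketch (not taken from the sources; FOO_DECL and friends are
   hypothetical FUNCTION_DECLs): after

     record_function_versions (foo_decl, foo_avx_decl);
     record_function_versions (foo_decl, foo_sse4_decl);

   the cgraph_function_version_info records of all three declarations sit on
   one doubly linked PREV/NEXT chain, so every semantically identical version
   can be reached from any one of them. */
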
283 /* Macros to access the next item in the list of free cgraph nodes and
284 edges. */
285 #define NEXT_FREE_NODE(NODE) cgraph ((NODE)->next)
286 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
287 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
288
289 /* Register HOOK to be called with DATA on each removed edge. */
290 struct cgraph_edge_hook_list *
291 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
292 {
293 struct cgraph_edge_hook_list *entry;
294 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
295
296 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
297 entry->hook = hook;
298 entry->data = data;
299 entry->next = NULL;
300 while (*ptr)
301 ptr = &(*ptr)->next;
302 *ptr = entry;
303 return entry;
304 }
305
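/* Illustrative sketch of how a client is expected to use the hook lists; the
   names below are hypothetical, only the cgraph_* calls are real:

     static struct cgraph_edge_hook_list *edge_removal_holder;

     static void
     my_edge_removal (struct cgraph_edge *e, void *data)
     {
       ... drop whatever summary the pass keeps for E ...
     }

     edge_removal_holder = cgraph_add_edge_removal_hook (my_edge_removal, NULL);
     ...
     cgraph_remove_edge_removal_hook (edge_removal_holder);

   The node removal, insertion and duplication hooks below follow the same
   register/unregister pattern. */
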
306 /* Remove ENTRY from the list of hooks called on removing edges. */
307 void
308 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
309 {
310 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
311
312 while (*ptr != entry)
313 ptr = &(*ptr)->next;
314 *ptr = entry->next;
315 free (entry);
316 }
317
318 /* Call all edge removal hooks. */
319 static void
320 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
321 {
322 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
323 while (entry)
324 {
325 entry->hook (e, entry->data);
326 entry = entry->next;
327 }
328 }
329
330 /* Register HOOK to be called with DATA on each removed node. */
331 struct cgraph_node_hook_list *
332 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
333 {
334 struct cgraph_node_hook_list *entry;
335 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
336
337 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
338 entry->hook = hook;
339 entry->data = data;
340 entry->next = NULL;
341 while (*ptr)
342 ptr = &(*ptr)->next;
343 *ptr = entry;
344 return entry;
345 }
346
347 /* Remove ENTRY from the list of hooks called on removing nodes. */
348 void
349 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
350 {
351 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
352
353 while (*ptr != entry)
354 ptr = &(*ptr)->next;
355 *ptr = entry->next;
356 free (entry);
357 }
358
359 /* Call all node removal hooks. */
360 static void
361 cgraph_call_node_removal_hooks (struct cgraph_node *node)
362 {
363 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
364 while (entry)
365 {
366 entry->hook (node, entry->data);
367 entry = entry->next;
368 }
369 }
370
371 /* Register HOOK to be called with DATA on each inserted node. */
372 struct cgraph_node_hook_list *
373 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
374 {
375 struct cgraph_node_hook_list *entry;
376 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
377
378 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
379 entry->hook = hook;
380 entry->data = data;
381 entry->next = NULL;
382 while (*ptr)
383 ptr = &(*ptr)->next;
384 *ptr = entry;
385 return entry;
386 }
387
388 /* Remove ENTRY from the list of hooks called on inserted nodes. */
389 void
390 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
391 {
392 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
393
394 while (*ptr != entry)
395 ptr = &(*ptr)->next;
396 *ptr = entry->next;
397 free (entry);
398 }
399
400 /* Call all node insertion hooks. */
401 void
402 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
403 {
404 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
405 while (entry)
406 {
407 entry->hook (node, entry->data);
408 entry = entry->next;
409 }
410 }
411
412 /* Register HOOK to be called with DATA on each duplicated edge. */
413 struct cgraph_2edge_hook_list *
414 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
415 {
416 struct cgraph_2edge_hook_list *entry;
417 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
418
419 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
420 entry->hook = hook;
421 entry->data = data;
422 entry->next = NULL;
423 while (*ptr)
424 ptr = &(*ptr)->next;
425 *ptr = entry;
426 return entry;
427 }
428
429 /* Remove ENTRY from the list of hooks called on duplicating edges. */
430 void
431 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
432 {
433 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
434
435 while (*ptr != entry)
436 ptr = &(*ptr)->next;
437 *ptr = entry->next;
438 free (entry);
439 }
440
441 /* Call all edge duplication hooks. */
442 void
443 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
444 struct cgraph_edge *cs2)
445 {
446 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
447 while (entry)
448 {
449 entry->hook (cs1, cs2, entry->data);
450 entry = entry->next;
451 }
452 }
453
454 /* Register HOOK to be called with DATA on each duplicated node. */
455 struct cgraph_2node_hook_list *
456 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
457 {
458 struct cgraph_2node_hook_list *entry;
459 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
460
461 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
462 entry->hook = hook;
463 entry->data = data;
464 entry->next = NULL;
465 while (*ptr)
466 ptr = &(*ptr)->next;
467 *ptr = entry;
468 return entry;
469 }
470
471 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
472 void
473 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
474 {
475 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
476
477 while (*ptr != entry)
478 ptr = &(*ptr)->next;
479 *ptr = entry->next;
480 free (entry);
481 }
482
483 /* Call all node duplication hooks. */
484 void
485 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
486 struct cgraph_node *node2)
487 {
488 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
489 while (entry)
490 {
491 entry->hook (node1, node2, entry->data);
492 entry = entry->next;
493 }
494 }
495
496 /* Allocate new callgraph node. */
497
498 static inline struct cgraph_node *
499 cgraph_allocate_node (void)
500 {
501 struct cgraph_node *node;
502
503 if (free_nodes)
504 {
505 node = free_nodes;
506 free_nodes = NEXT_FREE_NODE (node);
507 }
508 else
509 {
510 node = ggc_alloc_cleared_cgraph_node ();
511 node->uid = cgraph_max_uid++;
512 }
513
514 return node;
515 }
516
517 /* Allocate new callgraph node and insert it into basic data structures. */
518
519 struct cgraph_node *
520 cgraph_create_empty_node (void)
521 {
522 struct cgraph_node *node = cgraph_allocate_node ();
523
524 node->type = SYMTAB_FUNCTION;
525 node->frequency = NODE_FREQUENCY_NORMAL;
526 node->count_materialization_scale = REG_BR_PROB_BASE;
527 cgraph_n_nodes++;
528 return node;
529 }
530
531 /* Create a new cgraph node for function DECL and register it in the symbol table. */
532
533 struct cgraph_node *
534 cgraph_create_node (tree decl)
535 {
536 struct cgraph_node *node = cgraph_create_empty_node ();
537 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
538
539 node->decl = decl;
540 symtab_register_node (node);
541
542 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
543 {
544 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
545 node->next_nested = node->origin->nested;
546 node->origin->nested = node;
547 }
548 return node;
549 }
550
551 /* Try to find a call graph node for declaration DECL and if it does not exist
552 or if it corresponds to an inline clone, create a new one. */
553
554 struct cgraph_node *
555 cgraph_get_create_node (tree decl)
556 {
557 struct cgraph_node *first_clone = cgraph_get_node (decl);
558
559 if (first_clone && !first_clone->global.inlined_to)
560 return first_clone;
561
562 struct cgraph_node *node = cgraph_create_node (decl);
563 if (first_clone)
564 {
565 first_clone->clone_of = node;
566 node->clones = first_clone;
567 symtab_prevail_in_asm_name_hash (node);
568 symtab_insert_node_to_hashtable (node);
569 if (dump_file)
570 fprintf (dump_file, "Introduced new external node "
571 "(%s/%i) and turned into root of the clone tree.\n",
572 xstrdup (node->name ()), node->order);
573 }
574 else if (dump_file)
575 fprintf (dump_file, "Introduced new external node "
576 "(%s/%i).\n", xstrdup (node->name ()),
577 node->order);
578 return node;
579 }
580
581 /* Mark ALIAS as an alias to TARGET. TARGET may either be the FUNCTION_DECL
582 of the function being aliased or its assembler name (an IDENTIFIER_NODE). */
583
584 struct cgraph_node *
585 cgraph_create_function_alias (tree alias, tree target)
586 {
587 struct cgraph_node *alias_node;
588
589 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
590 || TREE_CODE (target) == IDENTIFIER_NODE);
591 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
592 alias_node = cgraph_get_create_node (alias);
593 gcc_assert (!alias_node->definition);
594 alias_node->alias_target = target;
595 alias_node->definition = true;
596 alias_node->alias = true;
597 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
598 alias_node->weakref = true;
599 return alias_node;
600 }
601
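/* For example, a function alias such as

     void my_alias (void) __attribute__ ((alias ("my_target")));

   eventually ends up here, conceptually as

     cgraph_create_function_alias (my_alias_decl, get_identifier ("my_target"));

   where TARGET may also be the FUNCTION_DECL of the target directly.  This is
   an illustrative sketch; MY_ALIAS_DECL and MY_TARGET are hypothetical and the
   real call sites live in the front ends and in the alias handling code. */
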
602 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
603 and NULL otherwise.
604 Same body aliases are output whenever the body of DECL is output,
605 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
606
607 struct cgraph_node *
608 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
609 {
610 struct cgraph_node *n;
611 #ifndef ASM_OUTPUT_DEF
612 /* If aliases aren't supported by the assembler, fail. */
613 return NULL;
614 #endif
615 /* Langhooks can create same body aliases of symbols not defined.
616 Those are useless. Drop them on the floor. */
617 if (cgraph_global_info_ready)
618 return NULL;
619
620 n = cgraph_create_function_alias (alias, decl);
621 n->cpp_implicit_alias = true;
622 if (cpp_implicit_aliases_done)
623 symtab_resolve_alias (n,
624 cgraph_get_node (decl));
625 return n;
626 }
627
628 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
629 aliases DECL with adjustments made to the first parameter.
630 See comments in thunk_adjust for details on the parameters. */
631
632 struct cgraph_node *
633 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
634 tree alias, tree decl ATTRIBUTE_UNUSED,
635 bool this_adjusting,
636 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
637 tree virtual_offset,
638 tree real_alias)
639 {
640 struct cgraph_node *node;
641
642 node = cgraph_get_node (alias);
643 if (node)
644 {
645 gcc_assert (node->definition);
646 gcc_assert (!node->alias);
647 gcc_assert (!node->thunk.thunk_p);
648 cgraph_remove_node (node);
649 }
650
651 node = cgraph_create_node (alias);
652 gcc_checking_assert (!virtual_offset
653 || tree_to_double_int (virtual_offset) ==
654 double_int::from_shwi (virtual_value));
655 node->thunk.fixed_offset = fixed_offset;
656 node->thunk.this_adjusting = this_adjusting;
657 node->thunk.virtual_value = virtual_value;
658 node->thunk.virtual_offset_p = virtual_offset != NULL;
659 node->thunk.alias = real_alias;
660 node->thunk.thunk_p = true;
661 node->definition = true;
662
663 return node;
664 }
665
666 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
667 Return NULL if there's no such node. */
668
669 struct cgraph_node *
670 cgraph_node_for_asm (tree asmname)
671 {
672 /* We do not want to look at inline clones. */
673 for (symtab_node *node = symtab_node_for_asm (asmname);
674 node;
675 node = node->next_sharing_asm_name)
676 {
677 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
678 if (cn && !cn->global.inlined_to)
679 return cn;
680 }
681 return NULL;
682 }
683
684 /* Returns a hash value for X (which really is a cgraph_edge). */
685
686 static hashval_t
687 edge_hash (const void *x)
688 {
689 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
690 }
691
692 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y. */
693
694 static int
695 edge_eq (const void *x, const void *y)
696 {
697 return ((const struct cgraph_edge *) x)->call_stmt == y;
698 }
699
700 /* Update the entry for call graph edge E in the call site hash of its caller. */
701
702 static inline void
703 cgraph_update_edge_in_call_site_hash (struct cgraph_edge *e)
704 {
705 void **slot;
706 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
707 e->call_stmt,
708 htab_hash_pointer (e->call_stmt),
709 INSERT);
710 *slot = e;
711 }
712
713 /* Add call graph edge E to the call site hash of its caller. */
714
715 static inline void
716 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
717 {
718 void **slot;
719 /* There are two speculative edges for every statement (one direct,
720 one indirect); always hash the direct one. */
721 if (e->speculative && e->indirect_unknown_callee)
722 return;
723 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
724 e->call_stmt,
725 htab_hash_pointer (e->call_stmt),
726 INSERT);
727 if (*slot)
728 {
729 gcc_assert (((struct cgraph_edge *)*slot)->speculative);
730 if (e->callee)
731 *slot = e;
732 return;
733 }
734 gcc_assert (!*slot || e->speculative);
735 *slot = e;
736 }
737
738 /* Return the callgraph edge representing the GIMPLE_CALL statement
739 CALL_STMT. */
740
741 struct cgraph_edge *
742 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
743 {
744 struct cgraph_edge *e, *e2;
745 int n = 0;
746
747 if (node->call_site_hash)
748 return (struct cgraph_edge *)
749 htab_find_with_hash (node->call_site_hash, call_stmt,
750 htab_hash_pointer (call_stmt));
751
752 /* This loop may turn out to be a performance problem. In that case, adding
753 hash tables to call nodes with very many edges is probably the best
754 solution. It is not a good idea to add a pointer into the CALL_EXPR itself
755 because we want to make it possible to have multiple cgraph nodes representing
756 different clones of the same body before the body is actually cloned. */
757 for (e = node->callees; e; e = e->next_callee)
758 {
759 if (e->call_stmt == call_stmt)
760 break;
761 n++;
762 }
763
764 if (!e)
765 for (e = node->indirect_calls; e; e = e->next_callee)
766 {
767 if (e->call_stmt == call_stmt)
768 break;
769 n++;
770 }
771
772 if (n > 100)
773 {
774 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
775 for (e2 = node->callees; e2; e2 = e2->next_callee)
776 cgraph_add_edge_to_call_site_hash (e2);
777 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
778 cgraph_add_edge_to_call_site_hash (e2);
779 }
780
781 return e;
782 }
783
784
785 /* Change the call_stmt field of edge E to NEW_STMT.
786 If UPDATE_SPECULATIVE and E is any component of a speculative
787 edge, then update all of its components. */
788
789 void
790 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt,
791 bool update_speculative)
792 {
793 tree decl;
794
795 /* Speculative edges have three components; update all of them
796 when asked to. */
797 if (update_speculative && e->speculative)
798 {
799 struct cgraph_edge *direct, *indirect;
800 struct ipa_ref *ref;
801
802 cgraph_speculative_call_info (e, direct, indirect, ref);
803 cgraph_set_call_stmt (direct, new_stmt, false);
804 cgraph_set_call_stmt (indirect, new_stmt, false);
805 ref->stmt = new_stmt;
806 return;
807 }
808
809 /* Only direct speculative edges go to call_site_hash. */
810 if (e->caller->call_site_hash
811 && (!e->speculative || !e->indirect_unknown_callee))
812 {
813 htab_remove_elt_with_hash (e->caller->call_site_hash,
814 e->call_stmt,
815 htab_hash_pointer (e->call_stmt));
816 }
817
818 e->call_stmt = new_stmt;
819 if (e->indirect_unknown_callee
820 && (decl = gimple_call_fndecl (new_stmt)))
821 {
822 /* Constant propagation (and possibly also inlining?) can turn an
823 indirect call into a direct one. */
824 struct cgraph_node *new_callee = cgraph_get_node (decl);
825
826 gcc_checking_assert (new_callee);
827 e = cgraph_make_edge_direct (e, new_callee);
828 }
829
830 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
831 e->can_throw_external = stmt_can_throw_external (new_stmt);
832 pop_cfun ();
833 if (e->caller->call_site_hash)
834 cgraph_add_edge_to_call_site_hash (e);
835 }
836
837 /* Allocate a cgraph_edge structure and fill it with data according to the
838 parameters, of which only CALLEE can be NULL (when creating an indirect call
839 edge). */
840
841 static struct cgraph_edge *
842 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
843 gimple call_stmt, gcov_type count, int freq,
844 bool indir_unknown_callee)
845 {
846 struct cgraph_edge *edge;
847
848 /* LTO does not actually have access to the call_stmt since these
849 have not been loaded yet. */
850 if (call_stmt)
851 {
852 /* This is a rather expensive check possibly triggering
853 construction of call stmt hashtable. */
854 #ifdef ENABLE_CHECKING
855 struct cgraph_edge *e;
856 gcc_checking_assert (!(e=cgraph_edge (caller, call_stmt)) || e->speculative);
857 #endif
858
859 gcc_assert (is_gimple_call (call_stmt));
860 }
861
862 if (free_edges)
863 {
864 edge = free_edges;
865 free_edges = NEXT_FREE_EDGE (edge);
866 }
867 else
868 {
869 edge = ggc_alloc_cgraph_edge ();
870 edge->uid = cgraph_edge_max_uid++;
871 }
872
873 edge->aux = NULL;
874 edge->caller = caller;
875 edge->callee = callee;
876 edge->prev_caller = NULL;
877 edge->next_caller = NULL;
878 edge->prev_callee = NULL;
879 edge->next_callee = NULL;
880 edge->lto_stmt_uid = 0;
881
882 edge->count = count;
883 gcc_assert (count >= 0);
884 edge->frequency = freq;
885 gcc_assert (freq >= 0);
886 gcc_assert (freq <= CGRAPH_FREQ_MAX);
887
888 edge->call_stmt = call_stmt;
889 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
890 edge->can_throw_external
891 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
892 pop_cfun ();
893 if (call_stmt
894 && callee && callee->decl
895 && !gimple_check_call_matching_types (call_stmt, callee->decl,
896 false))
897 edge->call_stmt_cannot_inline_p = true;
898 else
899 edge->call_stmt_cannot_inline_p = false;
900
901 edge->indirect_info = NULL;
902 edge->indirect_inlining_edge = 0;
903 edge->speculative = false;
904 edge->indirect_unknown_callee = indir_unknown_callee;
905 if (call_stmt && caller->call_site_hash)
906 cgraph_add_edge_to_call_site_hash (edge);
907
908 return edge;
909 }
910
911 /* Create edge from CALLER to CALLEE in the cgraph. */
912
913 struct cgraph_edge *
914 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
915 gimple call_stmt, gcov_type count, int freq)
916 {
917 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
918 count, freq, false);
919
920 initialize_inline_failed (edge);
921
922 edge->next_caller = callee->callers;
923 if (callee->callers)
924 callee->callers->prev_caller = edge;
925 edge->next_callee = caller->callees;
926 if (caller->callees)
927 caller->callees->prev_callee = edge;
928 caller->callees = edge;
929 callee->callers = edge;
930
931 return edge;
932 }
933
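/* Illustrative sketch (not lifted from the sources): while building the
   cgraph for a function body, each GIMPLE_CALL with a known callee yields a
   direct edge roughly like

     struct cgraph_node *callee
       = cgraph_get_create_node (gimple_call_fndecl (stmt));
     cgraph_create_edge (caller_node, callee, stmt, bb->count,
                         compute_call_stmt_bb_frequency (caller_node->decl, bb));

   where CALLER_NODE, STMT and BB are hypothetical locals for the caller's
   node, the call statement and its basic block. */
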
934 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
935
936 struct cgraph_indirect_call_info *
937 cgraph_allocate_init_indirect_info (void)
938 {
939 struct cgraph_indirect_call_info *ii;
940
941 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
942 ii->param_index = -1;
943 return ii;
944 }
945
946 /* Create an indirect edge from CALLER for CALL_STMT, whose callee is not yet
947 known. ECF_FLAGS describe properties of the call; COUNT and FREQ give its
948 execution count and frequency. */
949
950 struct cgraph_edge *
951 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
952 int ecf_flags,
953 gcov_type count, int freq)
954 {
955 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
956 count, freq, true);
957 tree target;
958
959 initialize_inline_failed (edge);
960
961 edge->indirect_info = cgraph_allocate_init_indirect_info ();
962 edge->indirect_info->ecf_flags = ecf_flags;
963
964 /* Record polymorphic call info. */
965 if (call_stmt
966 && (target = gimple_call_fn (call_stmt))
967 && virtual_method_call_p (target))
968 {
969 tree otr_type;
970 HOST_WIDE_INT otr_token;
971 ipa_polymorphic_call_context context;
972
973 get_polymorphic_call_info (caller->decl,
974 target,
975 &otr_type, &otr_token,
976 &context);
977
978 /* Only record types can have virtual calls. */
979 gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
980 edge->indirect_info->polymorphic = true;
981 edge->indirect_info->param_index = -1;
982 edge->indirect_info->otr_token = otr_token;
983 edge->indirect_info->otr_type = otr_type;
984 edge->indirect_info->outer_type = context.outer_type;
985 edge->indirect_info->offset = context.offset;
986 edge->indirect_info->maybe_in_construction
987 = context.maybe_in_construction;
988 edge->indirect_info->maybe_derived_type = context.maybe_derived_type;
989 }
990
991 edge->next_callee = caller->indirect_calls;
992 if (caller->indirect_calls)
993 caller->indirect_calls->prev_callee = edge;
994 caller->indirect_calls = edge;
995
996 return edge;
997 }
998
999 /* Remove the edge E from the list of the callers of the callee. */
1000
1001 static inline void
1002 cgraph_edge_remove_callee (struct cgraph_edge *e)
1003 {
1004 gcc_assert (!e->indirect_unknown_callee);
1005 if (e->prev_caller)
1006 e->prev_caller->next_caller = e->next_caller;
1007 if (e->next_caller)
1008 e->next_caller->prev_caller = e->prev_caller;
1009 if (!e->prev_caller)
1010 e->callee->callers = e->next_caller;
1011 }
1012
1013 /* Remove the edge E from the list of the callees of the caller. */
1014
1015 static inline void
1016 cgraph_edge_remove_caller (struct cgraph_edge *e)
1017 {
1018 if (e->prev_callee)
1019 e->prev_callee->next_callee = e->next_callee;
1020 if (e->next_callee)
1021 e->next_callee->prev_callee = e->prev_callee;
1022 if (!e->prev_callee)
1023 {
1024 if (e->indirect_unknown_callee)
1025 e->caller->indirect_calls = e->next_callee;
1026 else
1027 e->caller->callees = e->next_callee;
1028 }
1029 if (e->caller->call_site_hash)
1030 htab_remove_elt_with_hash (e->caller->call_site_hash,
1031 e->call_stmt,
1032 htab_hash_pointer (e->call_stmt));
1033 }
1034
1035 /* Put the edge onto the free list. */
1036
1037 static void
1038 cgraph_free_edge (struct cgraph_edge *e)
1039 {
1040 int uid = e->uid;
1041
1042 if (e->indirect_info)
1043 ggc_free (e->indirect_info);
1044
1045 /* Clear out the edge so we do not dangle pointers. */
1046 memset (e, 0, sizeof (*e));
1047 e->uid = uid;
1048 NEXT_FREE_EDGE (e) = free_edges;
1049 free_edges = e;
1050 }
1051
1052 /* Remove the edge E in the cgraph. */
1053
1054 void
1055 cgraph_remove_edge (struct cgraph_edge *e)
1056 {
1057 /* Call all edge removal hooks. */
1058 cgraph_call_edge_removal_hooks (e);
1059
1060 if (!e->indirect_unknown_callee)
1061 /* Remove from callers list of the callee. */
1062 cgraph_edge_remove_callee (e);
1063
1064 /* Remove from callees list of the callers. */
1065 cgraph_edge_remove_caller (e);
1066
1067 /* Put the edge onto the free list. */
1068 cgraph_free_edge (e);
1069 }
1070
1071 /* Set callee of call graph edge E and add it to the corresponding set of
1072 callers. */
1073
1074 static void
1075 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1076 {
1077 e->prev_caller = NULL;
1078 if (n->callers)
1079 n->callers->prev_caller = e;
1080 e->next_caller = n->callers;
1081 n->callers = e;
1082 e->callee = n;
1083 }
1084
1085 /* Turn edge E into a speculative call to N2. Update
1086 the profile so the direct call is taken DIRECT_COUNT times
1087 with DIRECT_FREQUENCY.
1088
1089 At clone materialization time, the indirect call E will
1090 be expanded as:
1091
1092 if (call_dest == N2)
1093 n2 ();
1094 else
1095 call call_dest
1096
1097 At this time the function just creates the direct call,
1098 the reference representing the if conditional and attaches
1099 them all to the original indirect call statement.
1100
1101 Return the direct edge created. */
1102
1103 struct cgraph_edge *
1104 cgraph_turn_edge_to_speculative (struct cgraph_edge *e,
1105 struct cgraph_node *n2,
1106 gcov_type direct_count,
1107 int direct_frequency)
1108 {
1109 struct cgraph_node *n = e->caller;
1110 struct ipa_ref *ref;
1111 struct cgraph_edge *e2;
1112
1113 if (dump_file)
1114 {
1115 fprintf (dump_file, "Indirect call -> speculative call"
1116 " %s/%i => %s/%i\n",
1117 xstrdup (n->name ()), n->order,
1118 xstrdup (n2->name ()), n2->order);
1119 }
1120 e->speculative = true;
1121 e2 = cgraph_create_edge (n, n2, e->call_stmt, direct_count, direct_frequency);
1122 initialize_inline_failed (e2);
1123 e2->speculative = true;
1124 if (TREE_NOTHROW (n2->decl))
1125 e2->can_throw_external = false;
1126 else
1127 e2->can_throw_external = e->can_throw_external;
1128 e2->lto_stmt_uid = e->lto_stmt_uid;
1129 e->count -= e2->count;
1130 e->frequency -= e2->frequency;
1131 cgraph_call_edge_duplication_hooks (e, e2);
1132 ref = ipa_record_reference (n, n2,
1133 IPA_REF_ADDR, e->call_stmt);
1134 ref->lto_stmt_uid = e->lto_stmt_uid;
1135 ref->speculative = e->speculative;
1136 cgraph_mark_address_taken_node (n2);
1137 return e2;
1138 }
1139
1140 /* A speculative call consists of three components:
1141 1) an indirect edge representing the original call
1142 2) a direct edge representing the new call
1143 3) an ADDR_EXPR reference representing the speculative check.
1144 All three components are attached to a single statement (the indirect
1145 call) and if one of them exists, all of them must exist.
1146
1147 Given a speculative call edge E, return all three components.
1148 */
1149
1150 void
1151 cgraph_speculative_call_info (struct cgraph_edge *e,
1152 struct cgraph_edge *&direct,
1153 struct cgraph_edge *&indirect,
1154 struct ipa_ref *&reference)
1155 {
1156 struct ipa_ref *ref;
1157 int i;
1158 struct cgraph_edge *e2;
1159
1160 if (!e->indirect_unknown_callee)
1161 for (e2 = e->caller->indirect_calls;
1162 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1163 e2 = e2->next_callee)
1164 ;
1165 else
1166 {
1167 e2 = e;
1168 /* We can take advantage of the call stmt hash. */
1169 if (e2->call_stmt)
1170 {
1171 e = cgraph_edge (e->caller, e2->call_stmt);
1172 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1173 }
1174 else
1175 for (e = e->caller->callees;
1176 e2->call_stmt != e->call_stmt
1177 || e2->lto_stmt_uid != e->lto_stmt_uid;
1178 e = e->next_callee)
1179 ;
1180 }
1181 gcc_assert (e->speculative && e2->speculative);
1182 direct = e;
1183 indirect = e2;
1184
1185 reference = NULL;
1186 for (i = 0; ipa_ref_list_reference_iterate (&e->caller->ref_list,
1187 i, ref); i++)
1188 if (ref->speculative
1189 && ((ref->stmt && ref->stmt == e->call_stmt)
1190 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1191 {
1192 reference = ref;
1193 break;
1194 }
1195
1196 /* A speculative edge always consists of all three components: the direct
1197 edge, the indirect edge and the reference. */
1198
1199 gcc_assert (e && e2 && ref);
1200 }
1201
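/* Typical use: given any one component E of a speculative call,

     struct cgraph_edge *direct, *indirect;
     struct ipa_ref *ref;
     cgraph_speculative_call_info (e, direct, indirect, ref);

   retrieves all three components, regardless of whether E was the direct or
   the indirect edge. */
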
1202 /* Redirect callee of E to N. The function does not update underlying
1203 call expression. */
1204
1205 void
1206 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1207 {
1208 /* Remove from callers list of the current callee. */
1209 cgraph_edge_remove_callee (e);
1210
1211 /* Insert to callers list of the new callee. */
1212 cgraph_set_edge_callee (e, n);
1213 }
1214
1215 /* Speculative call EDGE turned out to be a direct call to CALLEE_DECL.
1216 Remove the speculative call sequence and return the edge representing the call.
1217 It is up to the caller to redirect the call as appropriate. */
1218
1219 struct cgraph_edge *
1220 cgraph_resolve_speculation (struct cgraph_edge *edge, tree callee_decl)
1221 {
1222 struct cgraph_edge *e2;
1223 struct ipa_ref *ref;
1224
1225 gcc_assert (edge->speculative);
1226 cgraph_speculative_call_info (edge, e2, edge, ref);
1227 if (!callee_decl
1228 || !symtab_semantically_equivalent_p (ref->referred,
1229 symtab_get_node (callee_decl)))
1230 {
1231 if (dump_file)
1232 {
1233 if (callee_decl)
1234 {
1235 fprintf (dump_file, "Speculative indirect call %s/%i => %s/%i has "
1236 "turned out to have contradicting known target ",
1237 xstrdup (edge->caller->name ()), edge->caller->order,
1238 xstrdup (e2->callee->name ()), e2->callee->order);
1239 print_generic_expr (dump_file, callee_decl, 0);
1240 fprintf (dump_file, "\n");
1241 }
1242 else
1243 {
1244 fprintf (dump_file, "Removing speculative call %s/%i => %s/%i\n",
1245 xstrdup (edge->caller->name ()), edge->caller->order,
1246 xstrdup (e2->callee->name ()), e2->callee->order);
1247 }
1248 }
1249 }
1250 else
1251 {
1252 struct cgraph_edge *tmp = edge;
1253 if (dump_file)
1254 fprintf (dump_file, "Speculative call turned into direct call.\n");
1255 edge = e2;
1256 e2 = tmp;
1257 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1258 in the functions inlined through it. */
1259 }
1260 edge->count += e2->count;
1261 edge->frequency += e2->frequency;
1262 if (edge->frequency > CGRAPH_FREQ_MAX)
1263 edge->frequency = CGRAPH_FREQ_MAX;
1264 edge->speculative = false;
1265 e2->speculative = false;
1266 ipa_remove_reference (ref);
1267 if (e2->indirect_unknown_callee || e2->inline_failed)
1268 cgraph_remove_edge (e2);
1269 else
1270 cgraph_remove_node_and_inline_clones (e2->callee, NULL);
1271 if (edge->caller->call_site_hash)
1272 cgraph_update_edge_in_call_site_hash (edge);
1273 return edge;
1274 }
1275
1276 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1277 CALLEE. If EDGE is speculative, the speculation is resolved first and the
1278 resulting direct edge is returned. */
1279
1280 struct cgraph_edge *
1281 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1282 {
1283 gcc_assert (edge->indirect_unknown_callee);
1284
1285 /* If we are redirecting speculative call, make it non-speculative. */
1286 if (edge->indirect_unknown_callee && edge->speculative)
1287 {
1288 edge = cgraph_resolve_speculation (edge, callee->decl);
1289
1290 /* On successful speculation just return the pre-existing direct edge. */
1291 if (!edge->indirect_unknown_callee)
1292 return edge;
1293 }
1294
1295 edge->indirect_unknown_callee = 0;
1296 ggc_free (edge->indirect_info);
1297 edge->indirect_info = NULL;
1298
1299 /* Get the edge out of the indirect edge list. */
1300 if (edge->prev_callee)
1301 edge->prev_callee->next_callee = edge->next_callee;
1302 if (edge->next_callee)
1303 edge->next_callee->prev_callee = edge->prev_callee;
1304 if (!edge->prev_callee)
1305 edge->caller->indirect_calls = edge->next_callee;
1306
1307 /* Put it into the normal callee list. */
1308 edge->prev_callee = NULL;
1309 edge->next_callee = edge->caller->callees;
1310 if (edge->caller->callees)
1311 edge->caller->callees->prev_callee = edge;
1312 edge->caller->callees = edge;
1313
1314 /* Insert to callers list of the new callee. */
1315 cgraph_set_edge_callee (edge, callee);
1316
1317 if (edge->call_stmt)
1318 edge->call_stmt_cannot_inline_p
1319 = !gimple_check_call_matching_types (edge->call_stmt, callee->decl,
1320 false);
1321
1322 /* We need to re-determine the inlining status of the edge. */
1323 initialize_inline_failed (edge);
1324 return edge;
1325 }
1326
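/* Devirtualization and indirect call promotion use this conceptually as

     edge = cgraph_make_edge_direct (edge, cgraph_get_node (target_decl));

   where TARGET_DECL is the FUNCTION_DECL the analysis proved the call to
   reach (an illustrative sketch; see the use in cgraph_set_call_stmt above
   for a real instance). */
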
1327 /* If necessary, change the function declaration in the call statement
1328 associated with E so that it corresponds to the edge callee. */
1329
1330 gimple
1331 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
1332 {
1333 tree decl = gimple_call_fndecl (e->call_stmt);
1334 tree lhs = gimple_call_lhs (e->call_stmt);
1335 gimple new_stmt;
1336 gimple_stmt_iterator gsi;
1337 #ifdef ENABLE_CHECKING
1338 struct cgraph_node *node;
1339 #endif
1340
1341 if (e->speculative)
1342 {
1343 struct cgraph_edge *e2;
1344 gimple new_stmt;
1345 struct ipa_ref *ref;
1346
1347 cgraph_speculative_call_info (e, e, e2, ref);
1348 /* If there already is a direct call (i.e. as a result of the inliner's
1349 substitution), forget about speculating. */
1350 if (decl)
1351 e = cgraph_resolve_speculation (e, decl);
1352 /* If types do not match, speculation was likely wrong.
1353 The direct edge was possibly redirected to a clone with a different
1354 signature. We did not update the call statement yet, so compare it
1355 with the reference that still points to the proper type. */
1356 else if (!gimple_check_call_matching_types (e->call_stmt,
1357 ref->referred->decl,
1358 true))
1359 {
1360 if (dump_file)
1361 fprintf (dump_file, "Not expanding speculative call of %s/%i -> %s/%i\n"
1362 "Type mismatch.\n",
1363 xstrdup (e->caller->name ()),
1364 e->caller->order,
1365 xstrdup (e->callee->name ()),
1366 e->callee->order);
1367 e = cgraph_resolve_speculation (e, NULL);
1368 /* We are producing the final function body and will throw away the
1369 callgraph edges really soon. Reset the counts/frequencies to
1370 keep verifier happy in the case of roundoff errors. */
1371 e->count = gimple_bb (e->call_stmt)->count;
1372 e->frequency = compute_call_stmt_bb_frequency
1373 (e->caller->decl, gimple_bb (e->call_stmt));
1374 }
1375 /* Expand speculation into GIMPLE code. */
1376 else
1377 {
1378 if (dump_file)
1379 fprintf (dump_file,
1380 "Expanding speculative call of %s/%i -> %s/%i count:"
1381 HOST_WIDEST_INT_PRINT_DEC"\n",
1382 xstrdup (e->caller->name ()),
1383 e->caller->order,
1384 xstrdup (e->callee->name ()),
1385 e->callee->order,
1386 (HOST_WIDEST_INT)e->count);
1387 gcc_assert (e2->speculative);
1388 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1389 new_stmt = gimple_ic (e->call_stmt, cgraph (ref->referred),
1390 e->count || e2->count
1391 ? RDIV (e->count * REG_BR_PROB_BASE,
1392 e->count + e2->count)
1393 : e->frequency || e2->frequency
1394 ? RDIV (e->frequency * REG_BR_PROB_BASE,
1395 e->frequency + e2->frequency)
1396 : REG_BR_PROB_BASE / 2,
1397 e->count, e->count + e2->count);
1398 e->speculative = false;
1399 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt,
1400 new_stmt, false);
1401 e->frequency = compute_call_stmt_bb_frequency
1402 (e->caller->decl, gimple_bb (e->call_stmt));
1403 e2->frequency = compute_call_stmt_bb_frequency
1404 (e2->caller->decl, gimple_bb (e2->call_stmt));
1405 e2->speculative = false;
1406 ref->speculative = false;
1407 ref->stmt = NULL;
1408 /* Only the direct edge was entered in the call site hash; now that the
1409 two edges use separate statements, enter the indirect one as well. */
1410 if (e->caller->call_site_hash)
1411 cgraph_update_edge_in_call_site_hash (e2);
1412 pop_cfun ();
1413 /* Continue redirecting E to proper target. */
1414 }
1415 }
1416
1417 if (e->indirect_unknown_callee
1418 || decl == e->callee->decl)
1419 return e->call_stmt;
1420
1421 #ifdef ENABLE_CHECKING
1422 if (decl)
1423 {
1424 node = cgraph_get_node (decl);
1425 gcc_assert (!node || !node->clone.combined_args_to_skip);
1426 }
1427 #endif
1428
1429 if (cgraph_dump_file)
1430 {
1431 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
1432 xstrdup (e->caller->name ()), e->caller->order,
1433 xstrdup (e->callee->name ()), e->callee->order);
1434 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1435 if (e->callee->clone.combined_args_to_skip)
1436 {
1437 fprintf (cgraph_dump_file, " combined args to skip: ");
1438 dump_bitmap (cgraph_dump_file,
1439 e->callee->clone.combined_args_to_skip);
1440 }
1441 }
1442
1443 if (e->callee->clone.combined_args_to_skip)
1444 {
1445 int lp_nr;
1446
1447 new_stmt
1448 = gimple_call_copy_skip_args (e->call_stmt,
1449 e->callee->clone.combined_args_to_skip);
1450 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1451 gimple_call_set_fntype (new_stmt, gimple_call_fntype (e->call_stmt));
1452
1453 if (gimple_vdef (new_stmt)
1454 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1455 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1456
1457 gsi = gsi_for_stmt (e->call_stmt);
1458 gsi_replace (&gsi, new_stmt, false);
1459 /* We need to defer cleaning EH info on the new statement to
1460 fixup-cfg. We may not have dominator information at this point
1461 and thus would end up with unreachable blocks and have no way
1462 to communicate that we need to run CFG cleanup then. */
1463 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1464 if (lp_nr != 0)
1465 {
1466 remove_stmt_from_eh_lp (e->call_stmt);
1467 add_stmt_to_eh_lp (new_stmt, lp_nr);
1468 }
1469 }
1470 else
1471 {
1472 new_stmt = e->call_stmt;
1473 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1474 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1475 }
1476
1477 /* If the call becomes noreturn, remove the lhs. */
1478 if (lhs && (gimple_call_flags (new_stmt) & ECF_NORETURN))
1479 {
1480 if (TREE_CODE (lhs) == SSA_NAME)
1481 {
1482 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1483 TREE_TYPE (lhs), NULL);
1484 var = get_or_create_ssa_default_def
1485 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1486 gimple set_stmt = gimple_build_assign (lhs, var);
1487 gsi = gsi_for_stmt (new_stmt);
1488 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1489 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1490 }
1491 gimple_call_set_lhs (new_stmt, NULL_TREE);
1492 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1493 }
1494
1495 /* If new callee has no static chain, remove it. */
1496 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1497 {
1498 gimple_call_set_chain (new_stmt, NULL);
1499 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1500 }
1501
1502 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt, false);
1503
1504 if (cgraph_dump_file)
1505 {
1506 fprintf (cgraph_dump_file, " updated to:");
1507 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1508 }
1509 return new_stmt;
1510 }
1511
1512 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1513 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1514 of OLD_STMT if it was previously a call statement.
1515 If NEW_STMT is NULL, the call has been dropped without any
1516 replacement. */
1517
1518 static void
1519 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1520 gimple old_stmt, tree old_call,
1521 gimple new_stmt)
1522 {
1523 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1524 ? gimple_call_fndecl (new_stmt) : 0;
1525
1526 /* If both the old and the new call are indirect, there is nothing to update. */
1527 if (!new_call && !old_call)
1528 return;
1529 /* See if we turned an indirect call into a direct call or folded a call to
1530 one builtin into a call to a different builtin. */
1531 if (old_call != new_call)
1532 {
1533 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1534 struct cgraph_edge *ne = NULL;
1535 gcov_type count;
1536 int frequency;
1537
1538 if (e)
1539 {
1540 /* See if the edge is already there and has the correct callee. It
1541 might be so because indirect inlining has already updated
1542 it. We also might've cloned and redirected the edge. */
1543 if (new_call && e->callee)
1544 {
1545 struct cgraph_node *callee = e->callee;
1546 while (callee)
1547 {
1548 if (callee->decl == new_call
1549 || callee->former_clone_of == new_call)
1550 {
1551 cgraph_set_call_stmt (e, new_stmt);
1552 return;
1553 }
1554 callee = callee->clone_of;
1555 }
1556 }
1557
1558 /* Otherwise remove the edge and create a new one; we can't simply redirect it
1559 since the function has changed, so the inline plan and other information
1560 attached to the edge is invalid. */
1561 count = e->count;
1562 frequency = e->frequency;
1563 if (e->indirect_unknown_callee || e->inline_failed)
1564 cgraph_remove_edge (e);
1565 else
1566 cgraph_remove_node_and_inline_clones (e->callee, NULL);
1567 }
1568 else if (new_call)
1569 {
1570 /* We are seeing new direct call; compute profile info based on BB. */
1571 basic_block bb = gimple_bb (new_stmt);
1572 count = bb->count;
1573 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1574 bb);
1575 }
1576
1577 if (new_call)
1578 {
1579 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1580 new_stmt, count, frequency);
1581 gcc_assert (ne->inline_failed);
1582 }
1583 }
1584 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1585 else if (old_stmt != new_stmt)
1586 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1587 }
1588
1589 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1590 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1591 of OLD_STMT before it was updated (updating can happen in place). */
1592
1593 void
1594 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1595 {
1596 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1597 struct cgraph_node *node;
1598
1599 gcc_checking_assert (orig);
1600 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1601 if (orig->clones)
1602 for (node = orig->clones; node != orig;)
1603 {
1604 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1605 if (node->clones)
1606 node = node->clones;
1607 else if (node->next_sibling_clone)
1608 node = node->next_sibling_clone;
1609 else
1610 {
1611 while (node != orig && !node->next_sibling_clone)
1612 node = node->clone_of;
1613 if (node != orig)
1614 node = node->next_sibling_clone;
1615 }
1616 }
1617 }
1618
1619
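/* For example, code that folds one call statement into another is expected to
   do roughly (an illustrative sketch; OLD_DECL must be captured before the
   statement is modified because updating can happen in place):

     tree old_decl = gimple_call_fndecl (stmt);
     ... fold or rewrite STMT into NEW_STMT ...
     cgraph_update_edges_for_call_stmt (stmt, old_decl, new_stmt);

   so that the edges of the current function and of all its clones stay in
   sync. */
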
1620 /* Remove all callees from the node. */
1621
1622 void
1623 cgraph_node_remove_callees (struct cgraph_node *node)
1624 {
1625 struct cgraph_edge *e, *f;
1626
1627 /* It is sufficient to remove the edges from the lists of callers of
1628 the callees. The callee list of the node can be zapped with one
1629 assignment. */
1630 for (e = node->callees; e; e = f)
1631 {
1632 f = e->next_callee;
1633 cgraph_call_edge_removal_hooks (e);
1634 if (!e->indirect_unknown_callee)
1635 cgraph_edge_remove_callee (e);
1636 cgraph_free_edge (e);
1637 }
1638 for (e = node->indirect_calls; e; e = f)
1639 {
1640 f = e->next_callee;
1641 cgraph_call_edge_removal_hooks (e);
1642 if (!e->indirect_unknown_callee)
1643 cgraph_edge_remove_callee (e);
1644 cgraph_free_edge (e);
1645 }
1646 node->indirect_calls = NULL;
1647 node->callees = NULL;
1648 if (node->call_site_hash)
1649 {
1650 htab_delete (node->call_site_hash);
1651 node->call_site_hash = NULL;
1652 }
1653 }
1654
1655 /* Remove all callers from the node. */
1656
1657 static void
1658 cgraph_node_remove_callers (struct cgraph_node *node)
1659 {
1660 struct cgraph_edge *e, *f;
1661
1662 /* It is sufficient to remove the edges from the lists of callees of
1663 the callers. The caller list of the node can be zapped with one
1664 assignment. */
1665 for (e = node->callers; e; e = f)
1666 {
1667 f = e->next_caller;
1668 cgraph_call_edge_removal_hooks (e);
1669 cgraph_edge_remove_caller (e);
1670 cgraph_free_edge (e);
1671 }
1672 node->callers = NULL;
1673 }
1674
1675 /* Helper function for cgraph_release_function_body and free_lang_data.
1676 It releases body from function DECL without having to inspect its
1677 possibly non-existent symtab node. */
1678
1679 void
1680 release_function_body (tree decl)
1681 {
1682 if (DECL_STRUCT_FUNCTION (decl))
1683 {
1684 push_cfun (DECL_STRUCT_FUNCTION (decl));
1685 if (cfun->cfg
1686 && current_loops)
1687 {
1688 cfun->curr_properties &= ~PROP_loops;
1689 loop_optimizer_finalize ();
1690 }
1691 if (cfun->gimple_df)
1692 {
1693 delete_tree_ssa ();
1694 delete_tree_cfg_annotations ();
1695 cfun->eh = NULL;
1696 }
1697 if (cfun->cfg)
1698 {
1699 gcc_assert (dom_computed[0] == DOM_NONE);
1700 gcc_assert (dom_computed[1] == DOM_NONE);
1701 clear_edges ();
1702 cfun->cfg = NULL;
1703 }
1704 if (cfun->value_histograms)
1705 free_histograms ();
1706 pop_cfun ();
1707 gimple_set_body (decl, NULL);
1708 /* The struct function hangs onto a lot of data that would leak if we did
1709 not remove all pointers to it. */
1710 ggc_free (DECL_STRUCT_FUNCTION (decl));
1711 DECL_STRUCT_FUNCTION (decl) = NULL;
1712 }
1713 DECL_SAVED_TREE (decl) = NULL;
1714 }
1715
1716 /* Release the memory used to represent the body of function NODE.
1717 Use this only for functions that are released before being translated to
1718 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1719 are freed in final.c via free_after_compilation (). */
1720
1721 void
1722 cgraph_release_function_body (struct cgraph_node *node)
1723 {
1724 node->ipa_transforms_to_apply.release ();
1725 if (!node->used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
1726 {
1727 DECL_RESULT (node->decl) = NULL;
1728 DECL_ARGUMENTS (node->decl) = NULL;
1729 }
1730 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1731 of its associated function declaration because it's
1732 needed to emit debug info later. */
1733 if (!node->used_as_abstract_origin && DECL_INITIAL (node->decl))
1734 DECL_INITIAL (node->decl) = error_mark_node;
1735 release_function_body (node->decl);
1736 if (node->lto_file_data)
1737 lto_free_function_in_decl_state_for_node (node);
1738 }
1739
1740 /* Remove the node from cgraph. */
1741
1742 void
1743 cgraph_remove_node (struct cgraph_node *node)
1744 {
1745 struct cgraph_node *n;
1746 int uid = node->uid;
1747
1748 cgraph_call_node_removal_hooks (node);
1749 cgraph_node_remove_callers (node);
1750 cgraph_node_remove_callees (node);
1751 node->ipa_transforms_to_apply.release ();
1752
1753 /* Incremental inlining accesses removed nodes stored in the postorder
1754 list. */
1755 node->force_output = false;
1756 node->forced_by_abi = false;
1757 for (n = node->nested; n; n = n->next_nested)
1758 n->origin = NULL;
1759 node->nested = NULL;
1760 if (node->origin)
1761 {
1762 struct cgraph_node **node2 = &node->origin->nested;
1763
1764 while (*node2 != node)
1765 node2 = &(*node2)->next_nested;
1766 *node2 = node->next_nested;
1767 }
1768 symtab_unregister_node (node);
1769 if (node->prev_sibling_clone)
1770 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1771 else if (node->clone_of)
1772 node->clone_of->clones = node->next_sibling_clone;
1773 if (node->next_sibling_clone)
1774 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1775 if (node->clones)
1776 {
1777 struct cgraph_node *n, *next;
1778
1779 if (node->clone_of)
1780 {
1781 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1782 n->clone_of = node->clone_of;
1783 n->clone_of = node->clone_of;
1784 n->next_sibling_clone = node->clone_of->clones;
1785 if (node->clone_of->clones)
1786 node->clone_of->clones->prev_sibling_clone = n;
1787 node->clone_of->clones = node->clones;
1788 }
1789 else
1790 {
1791 /* We are removing a node with clones. This makes the clones inconsistent,
1792 but assume they will be removed subsequently and just keep the clone
1793 tree intact. This can happen during unreachable function removal since
1794 we remove unreachable functions in random order, not by bottom-up
1795 walk of clone trees. */
1796 for (n = node->clones; n; n = next)
1797 {
1798 next = n->next_sibling_clone;
1799 n->next_sibling_clone = NULL;
1800 n->prev_sibling_clone = NULL;
1801 n->clone_of = NULL;
1802 }
1803 }
1804 }
1805
1806 /* While all the clones are removed after being processed, the function
1807 itself is kept in the cgraph even after it is compiled. Check whether
1808 we are done with this body and reclaim it proactively if this is the case.
1809 */
1810 if (cgraph_state != CGRAPH_LTO_STREAMING)
1811 {
1812 n = cgraph_get_node (node->decl);
1813 if (!n
1814 || (!n->clones && !n->clone_of && !n->global.inlined_to
1815 && (cgraph_global_info_ready
1816 && (TREE_ASM_WRITTEN (n->decl)
1817 || DECL_EXTERNAL (n->decl)
1818 || !n->analyzed
1819 || (!flag_wpa && n->in_other_partition)))))
1820 cgraph_release_function_body (node);
1821 }
1822
1823 node->decl = NULL;
1824 if (node->call_site_hash)
1825 {
1826 htab_delete (node->call_site_hash);
1827 node->call_site_hash = NULL;
1828 }
1829 cgraph_n_nodes--;
1830
1831 /* Clear out the node to NULL all pointers and add the node to the free
1832 list. */
1833 memset (node, 0, sizeof (*node));
1834 node->type = SYMTAB_FUNCTION;
1835 node->uid = uid;
1836 SET_NEXT_FREE_NODE (node, free_nodes);
1837 free_nodes = node;
1838 }
1839
1840 /* Likewise indicate that NODE has its address taken. */
1841
1842 void
1843 cgraph_mark_address_taken_node (struct cgraph_node *node)
1844 {
1845 /* Indirect inlining can figure out that all uses of the address are
1846 inlined. */
1847 if (node->global.inlined_to)
1848 {
1849 gcc_assert (cfun->after_inlining);
1850 gcc_assert (node->callers->indirect_inlining_edge);
1851 return;
1852 }
1853 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1854 IPA_REF_ADDR reference exists (and thus it should be set on node
1855 representing alias we take address of) and as a test whether address
1856 of the object was taken (and thus it should be set on node alias is
1857 referring to). We should remove the first use and then remove the
1858 following set. */
1859 node->address_taken = 1;
1860 node = cgraph_function_or_thunk_node (node, NULL);
1861 node->address_taken = 1;
1862 }
1863
1864 /* Return local info for the compiled function. */
1865
1866 struct cgraph_local_info *
1867 cgraph_local_info (tree decl)
1868 {
1869 struct cgraph_node *node;
1870
1871 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1872 node = cgraph_get_node (decl);
1873 if (!node)
1874 return NULL;
1875 return &node->local;
1876 }
1877
1878 /* Return global info for the compiled function. */
1879
1880 struct cgraph_global_info *
1881 cgraph_global_info (tree decl)
1882 {
1883 struct cgraph_node *node;
1884
1885 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1886 node = cgraph_get_node (decl);
1887 if (!node)
1888 return NULL;
1889 return &node->global;
1890 }
1891
1892 /* Return RTL info for the compiled function. */
1893
1894 struct cgraph_rtl_info *
1895 cgraph_rtl_info (tree decl)
1896 {
1897 struct cgraph_node *node;
1898
1899 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1900 node = cgraph_get_node (decl);
1901 if (!node
1902 || (decl != current_function_decl
1903 && !TREE_ASM_WRITTEN (node->decl)))
1904 return NULL;
1905 return &node->rtl;
1906 }
1907
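/* Illustrative sketch (not part of GCC, kept under "#if 0" so it is never
   compiled): how client code might consult the per-decl accessors above.
   The function example_query_function_info and its FNDECL parameter are
   hypothetical; the accessors and the local.local / global.inlined_to
   fields are the ones declared in cgraph.h and defined here.  */
#if 0
static bool
example_query_function_info (tree fndecl)
{
  struct cgraph_local_info *li = cgraph_local_info (fndecl);

  /* A NULL result means no cgraph node exists for FNDECL (yet).  */
  if (!li)
    return false;

  /* Global info is only meaningful once the whole unit was analyzed.  */
  if (cgraph_global_info_ready)
    {
      struct cgraph_global_info *gi = cgraph_global_info (fndecl);
      if (gi && gi->inlined_to)
	return false;
    }
  return li->local;
}
#endif
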
1908 /* Return a string describing the failure REASON. */
1909
1910 const char*
1911 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1912 {
1913 #undef DEFCIFCODE
1914 #define DEFCIFCODE(code, type, string) string,
1915
1916 static const char *cif_string_table[CIF_N_REASONS] = {
1917 #include "cif-code.def"
1918 };
1919
1920 /* Signedness of an enum type is implementation defined, so cast it
1921 to unsigned before testing. */
1922 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1923 return cif_string_table[reason];
1924 }
1925
1926 /* Return a type describing the failure REASON. */
1927
1928 cgraph_inline_failed_type_t
1929 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1930 {
1931 #undef DEFCIFCODE
1932 #define DEFCIFCODE(code, type, string) type,
1933
1934 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1935 #include "cif-code.def"
1936 };
1937
1938 /* Signedness of an enum type is implementation defined, so cast it
1939 to unsigned before testing. */
1940 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1941 return cif_type_table[reason];
1942 }
1943
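/* Illustrative sketch (not part of GCC, kept under "#if 0"): the
   "X macro" technique used by the two lookup functions above.  Each entry
   of cif-code.def expands through DEFCIFCODE; redefining the macro before
   including the file again selects which column of the table is
   collected.  The hypothetical consumer below collects the short code
   names themselves as strings.  */
#if 0
#undef DEFCIFCODE
#define DEFCIFCODE(code, type, string) #code,

static const char *cif_code_names[CIF_N_REASONS] = {
#include "cif-code.def"
};

/* cif_code_names[(int) reason] would then yield the textual name of the
   entry's first column, in the same order as cif_string_table and
   cif_type_table above.  */
#endif
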
1944 /* Names used to print out the availability enum. */
1945 const char * const cgraph_availability_names[] =
1946 {"unset", "not_available", "overwritable", "available", "local"};
1947
1948
1949 /* Dump call graph node NODE to file F. */
1950
1951 void
1952 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1953 {
1954 struct cgraph_edge *edge;
1955 int indirect_calls_count = 0;
1956
1957 dump_symtab_base (f, node);
1958
1959 if (node->global.inlined_to)
1960 fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
1961 xstrdup (node->name ()),
1962 node->order,
1963 xstrdup (node->global.inlined_to->name ()),
1964 node->global.inlined_to->order);
1965 if (node->clone_of)
1966 fprintf (f, " Clone of %s/%i\n",
1967 node->clone_of->asm_name (),
1968 node->clone_of->order);
1969 if (cgraph_function_flags_ready)
1970 fprintf (f, " Availability: %s\n",
1971 cgraph_availability_names [cgraph_function_body_availability (node)]);
1972
1973 if (node->profile_id)
1974 fprintf (f, " Profile id: %i\n",
1975 node->profile_id);
1976 fprintf (f, " First run: %i\n", node->tp_first_run);
1977 fprintf (f, " Function flags:");
1978 if (node->count)
1979 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1980 (HOST_WIDEST_INT)node->count);
1981 if (node->origin)
1982 fprintf (f, " nested in: %s", node->origin->asm_name ());
1983 if (gimple_has_body_p (node->decl))
1984 fprintf (f, " body");
1985 if (node->process)
1986 fprintf (f, " process");
1987 if (node->local.local)
1988 fprintf (f, " local");
1989 if (node->local.redefined_extern_inline)
1990 fprintf (f, " redefined_extern_inline");
1991 if (node->only_called_at_startup)
1992 fprintf (f, " only_called_at_startup");
1993 if (node->only_called_at_exit)
1994 fprintf (f, " only_called_at_exit");
1995 if (node->tm_clone)
1996 fprintf (f, " tm_clone");
1997
1998 fprintf (f, "\n");
1999
2000 if (node->thunk.thunk_p)
2001 {
2002 fprintf (f, " Thunk");
2003 if (node->thunk.alias)
2004 fprintf (f, " of %s (asm: %s)",
2005 lang_hooks.decl_printable_name (node->thunk.alias, 2),
2006 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2007 fprintf (f, " fixed offset %i virtual value %i has "
2008 "virtual offset %i)\n",
2009 (int)node->thunk.fixed_offset,
2010 (int)node->thunk.virtual_value,
2011 (int)node->thunk.virtual_offset_p);
2012 }
2013 if (node->alias && node->thunk.alias
2014 && DECL_P (node->thunk.alias))
2015 {
2016 fprintf (f, " Alias of %s",
2017 lang_hooks.decl_printable_name (node->thunk.alias, 2));
2018 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
2019 fprintf (f, " (asm: %s)",
2020 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2021 fprintf (f, "\n");
2022 }
2023
2024 fprintf (f, " Called by: ");
2025
2026 for (edge = node->callers; edge; edge = edge->next_caller)
2027 {
2028 fprintf (f, "%s/%i ", edge->caller->asm_name (),
2029 edge->caller->order);
2030 if (edge->count)
2031 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
2032 (HOST_WIDEST_INT)edge->count);
2033 if (edge->frequency)
2034 fprintf (f, "(%.2f per call) ",
2035 edge->frequency / (double)CGRAPH_FREQ_BASE);
2036 if (edge->speculative)
2037 fprintf (f, "(speculative) ");
2038 if (!edge->inline_failed)
2039 fprintf (f, "(inlined) ");
2040 if (edge->indirect_inlining_edge)
2041 fprintf (f, "(indirect_inlining) ");
2042 if (edge->can_throw_external)
2043 fprintf (f, "(can throw external) ");
2044 }
2045
2046 fprintf (f, "\n Calls: ");
2047 for (edge = node->callees; edge; edge = edge->next_callee)
2048 {
2049 fprintf (f, "%s/%i ", edge->callee->asm_name (),
2050 edge->callee->order);
2051 if (edge->speculative)
2052 fprintf (f, "(speculative) ");
2053 if (!edge->inline_failed)
2054 fprintf (f, "(inlined) ");
2055 if (edge->indirect_inlining_edge)
2056 fprintf (f, "(indirect_inlining) ");
2057 if (edge->count)
2058 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
2059 (HOST_WIDEST_INT)edge->count);
2060 if (edge->frequency)
2061 fprintf (f, "(%.2f per call) ",
2062 edge->frequency / (double)CGRAPH_FREQ_BASE);
2063 if (edge->can_throw_external)
2064 fprintf (f, "(can throw external) ");
2065 }
2066 fprintf (f, "\n");
2067
2068 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
2069 indirect_calls_count++;
2070 if (indirect_calls_count)
2071 fprintf (f, " Has %i outgoing edges for indirect calls.\n",
2072 indirect_calls_count);
2073 }
2074
2075
2076 /* Dump call graph node NODE to stderr. */
2077
2078 DEBUG_FUNCTION void
2079 debug_cgraph_node (struct cgraph_node *node)
2080 {
2081 dump_cgraph_node (stderr, node);
2082 }
2083
2084
2085 /* Dump the callgraph to file F. */
2086
2087 void
2088 dump_cgraph (FILE *f)
2089 {
2090 struct cgraph_node *node;
2091
2092 fprintf (f, "callgraph:\n\n");
2093 FOR_EACH_FUNCTION (node)
2094 dump_cgraph_node (f, node);
2095 }
2096
2097
2098 /* Dump the call graph to stderr. */
2099
2100 DEBUG_FUNCTION void
2101 debug_cgraph (void)
2102 {
2103 dump_cgraph (stderr);
2104 }
2105
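/* Illustrative note (not part of GCC sources proper): the DEBUG_FUNCTION
   entry points above are meant to be invoked by hand from a debugger
   while stopped inside the compiler, for example from gdb:

       (gdb) call debug_cgraph ()
       (gdb) call debug_cgraph_node (node)

   where "node" is any struct cgraph_node * visible in the current frame.
   Output goes to stderr.  */
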
2106 /* Return true when the DECL can possibly be inlined. */
2107 bool
2108 cgraph_function_possibly_inlined_p (tree decl)
2109 {
2110 if (!cgraph_global_info_ready)
2111 return !DECL_UNINLINABLE (decl);
2112 return DECL_POSSIBLY_INLINED (decl);
2113 }
2114
2115 /* NODE is no longer a nested function; update cgraph accordingly. */
2116 void
2117 cgraph_unnest_node (struct cgraph_node *node)
2118 {
2119 gcc_assert (node->origin);
2120 struct cgraph_node **node2 = &node->origin->nested;
2121
2122 while (*node2 != node)
2123 node2 = &(*node2)->next_nested;
2124 *node2 = node->next_nested;
2125 node->origin = NULL;
2126 }
2127
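/* Illustrative sketch (not part of GCC, kept under "#if 0"): the
   pointer-to-pointer idiom used by cgraph_unnest_node above (and by
   cgraph_remove_node) to unlink an element from a singly linked list
   without special-casing the list head.  The minimal list type below is
   hypothetical and only shows the shape of the idiom.  */
#if 0
struct example_item
{
  struct example_item *next;
};

static void
example_unlink (struct example_item **head, struct example_item *item)
{
  struct example_item **slot = head;

  /* Advance SLOT until it is the pointer that currently points at ITEM;
     that is either *HEAD itself or some element's next field.  */
  while (*slot != item)
    slot = &(*slot)->next;

  /* Splice ITEM out by redirecting that pointer past it.  */
  *slot = item->next;
}
#endif
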
2128 /* Return function availability. See cgraph.h for description of individual
2129 return values. */
2130 enum availability
2131 cgraph_function_body_availability (struct cgraph_node *node)
2132 {
2133 enum availability avail;
2134 if (!node->analyzed)
2135 avail = AVAIL_NOT_AVAILABLE;
2136 else if (node->local.local)
2137 avail = AVAIL_LOCAL;
2138 else if (node->alias && node->weakref)
2139 cgraph_function_or_thunk_node (node, &avail);
2140 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (node->decl)))
2141 avail = AVAIL_OVERWRITABLE;
2142 else if (!node->externally_visible)
2143 avail = AVAIL_AVAILABLE;
2144 /* Inline functions are safe to be analyzed even if their symbol can
2145 be overwritten at runtime. It is not meaningful to enforce any sane
2146 behaviour when an inline function is replaced by a different body. */
2147 else if (DECL_DECLARED_INLINE_P (node->decl))
2148 avail = AVAIL_AVAILABLE;
2149
2150 /* If the function can be overwritten, return OVERWRITABLE. Take
2151 care of at least two notable extensions - the COMDAT functions
2152 used to share template instantiations in C++ (this is symmetric
2153 to the code in cp_cannot_inline_tree_fn and probably should be
2154 shared, with the inlinability hooks completely eliminated).
2155 
2156 ??? Does the C++ one definition rule allow us to always return
2157 AVAIL_AVAILABLE here? That would be a good reason to preserve this
2158 bit. */
2159
2160 else if (decl_replaceable_p (node->decl)
2161 && !DECL_EXTERNAL (node->decl))
2162 avail = AVAIL_OVERWRITABLE;
2163 else avail = AVAIL_AVAILABLE;
2164
2165 return avail;
2166 }
2167
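/* Illustrative sketch (not part of GCC, kept under "#if 0"): how an IPA
   analysis would typically consult the availability computed above before
   trusting a function body.  The worker example_body_is_reliable_p and
   its DUMP parameter are hypothetical; cgraph_function_body_availability
   and cgraph_availability_names are defined in this file.  */
#if 0
static bool
example_body_is_reliable_p (struct cgraph_node *node, FILE *dump)
{
  enum availability avail = cgraph_function_body_availability (node);

  if (dump)
    fprintf (dump, "%s availability: %s\n", node->name (),
	     cgraph_availability_names[avail]);

  /* AVAIL_OVERWRITABLE bodies may be replaced at link or run time, so
     only bodies that are at least AVAIL_AVAILABLE can be relied upon.  */
  return avail >= AVAIL_AVAILABLE;
}
#endif
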
2168 /* Worker for cgraph_node_can_be_local_p. */
2169 static bool
2170 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2171 void *data ATTRIBUTE_UNUSED)
2172 {
2173 return !(!node->force_output
2174 && ((DECL_COMDAT (node->decl)
2175 && !node->forced_by_abi
2176 && !symtab_used_from_object_file_p (node)
2177 && !node->same_comdat_group)
2178 || !node->externally_visible));
2179 }
2180
2181 /* Return true if NODE can be made local for API change.
2182 Extern inline functions and C++ COMDAT functions can be made local
2183 at the expense of possible code size growth if the function is used in multiple
2184 compilation units. */
2185 bool
2186 cgraph_node_can_be_local_p (struct cgraph_node *node)
2187 {
2188 return (!node->address_taken
2189 && !cgraph_for_node_and_aliases (node,
2190 cgraph_node_cannot_be_local_p_1,
2191 NULL, true));
2192 }
2193
2194 /* Call callback on NODE, thunks and aliases associated with NODE.
2195 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2196 skipped. */
2197
2198 bool
2199 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2200 bool (*callback) (struct cgraph_node *, void *),
2201 void *data,
2202 bool include_overwritable)
2203 {
2204 struct cgraph_edge *e;
2205 int i;
2206 struct ipa_ref *ref;
2207
2208 if (callback (node, data))
2209 return true;
2210 for (e = node->callers; e; e = e->next_caller)
2211 if (e->caller->thunk.thunk_p
2212 && (include_overwritable
2213 || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2214 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2215 include_overwritable))
2216 return true;
2217 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
2218 if (ref->use == IPA_REF_ALIAS)
2219 {
2220 struct cgraph_node *alias = ipa_ref_referring_node (ref);
2221 if (include_overwritable
2222 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2223 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2224 include_overwritable))
2225 return true;
2226 }
2227 return false;
2228 }
2229
2230 /* Call callback on NODE and aliases associated with NODE.
2231 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2232 skipped. */
2233
2234 bool
2235 cgraph_for_node_and_aliases (struct cgraph_node *node,
2236 bool (*callback) (struct cgraph_node *, void *),
2237 void *data,
2238 bool include_overwritable)
2239 {
2240 int i;
2241 struct ipa_ref *ref;
2242
2243 if (callback (node, data))
2244 return true;
2245 for (i = 0; ipa_ref_list_referring_iterate (&node->ref_list, i, ref); i++)
2246 if (ref->use == IPA_REF_ALIAS)
2247 {
2248 struct cgraph_node *alias = ipa_ref_referring_node (ref);
2249 if (include_overwritable
2250 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2251 if (cgraph_for_node_and_aliases (alias, callback, data,
2252 include_overwritable))
2253 return true;
2254 }
2255 return false;
2256 }
2257
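/* Illustrative sketch (not part of GCC, kept under "#if 0"): composing
   the walkers above with a small worker callback, in the same style as
   nonremovable_p and collect_callers_of_node_1 further below.  The
   hypothetical worker counts NODE together with its aliases through a
   counter passed via DATA.  */
#if 0
static bool
example_count_one (struct cgraph_node *node ATTRIBUTE_UNUSED, void *data)
{
  int *counter = (int *) data;
  (*counter)++;
  /* Returning false keeps the walk going; returning true would stop it
     and make the walker itself return true.  */
  return false;
}

static int
example_count_node_and_aliases (struct cgraph_node *node)
{
  int count = 0;
  cgraph_for_node_and_aliases (node, example_count_one, &count, true);
  return count;
}
#endif
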
2258 /* Worker to make NODE local. */
2259
2260 static bool
2261 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2262 {
2263 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2264 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2265 {
2266 symtab_make_decl_local (node->decl);
2267
2268 node->externally_visible = false;
2269 node->forced_by_abi = false;
2270 node->local.local = true;
2271 node->unique_name = (node->resolution == LDPR_PREVAILING_DEF_IRONLY
2272 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP);
2273 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2274 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2275 }
2276 return false;
2277 }
2278
2279 /* Make NODE local. */
2280
2281 void
2282 cgraph_make_node_local (struct cgraph_node *node)
2283 {
2284 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2285 NULL, true);
2286 }
2287
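/* Illustrative sketch (not part of GCC, kept under "#if 0"): the intended
   pairing of the two entry points above.  A pass first asks whether the
   node may legally become local and only then rewrites its visibility;
   calling cgraph_make_node_local on a node that fails the predicate would
   trip the checking assert in the worker.  The wrapper name is
   hypothetical.  */
#if 0
static void
example_try_localize (struct cgraph_node *node)
{
  if (cgraph_node_can_be_local_p (node))
    cgraph_make_node_local (node);
}
#endif
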
2288 /* Worker to set nothrow flag. */
2289
2290 static bool
2291 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2292 {
2293 struct cgraph_edge *e;
2294
2295 TREE_NOTHROW (node->decl) = data != NULL;
2296
2297 if (data != NULL)
2298 for (e = node->callers; e; e = e->next_caller)
2299 e->can_throw_external = false;
2300 return false;
2301 }
2302
2303 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE,
2304 if any, to NOTHROW. */
2305
2306 void
2307 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2308 {
2309 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2310 (void *)(size_t)nothrow, false);
2311 }
2312
2313 /* Worker to set const flag. */
2314
2315 static bool
2316 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2317 {
2318 /* Static constructors and destructors without a side effect can be
2319 optimized out. */
2320 if (data && !((size_t)data & 2))
2321 {
2322 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2323 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2324 if (DECL_STATIC_DESTRUCTOR (node->decl))
2325 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2326 }
2327 TREE_READONLY (node->decl) = data != NULL;
2328 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2329 return false;
2330 }
2331
2332 /* Set TREE_READONLY on NODE's decl and on aliases of NODE,
2333 if any, to READONLY. */
2334
2335 void
2336 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2337 {
2338 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2339 (void *)(size_t)(readonly + (int)looping * 2),
2340 false);
2341 }
2342
2343 /* Worker to set pure flag. */
2344
2345 static bool
2346 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2347 {
2348 /* Static constructors and destructors without a side effect can be
2349 optimized out. */
2350 if (data && !((size_t)data & 2))
2351 {
2352 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2353 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2354 if (DECL_STATIC_DESTRUCTOR (node->decl))
2355 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2356 }
2357 DECL_PURE_P (node->decl) = data != NULL;
2358 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2359 return false;
2360 }
2361
2362 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE,
2363 if any, to PURE. */
2364
2365 void
2366 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2367 {
2368 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2369 (void *)(size_t)(pure + (int)looping * 2),
2370 false);
2371 }
2372
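/* Illustrative sketch (not part of GCC, kept under "#if 0"): the encoding
   used by the three flag setters above.  Two booleans are packed into the
   single void *DATA handed to each worker: bit 0 carries the flag itself
   and bit 1 the "looping" variant, which is why the setters pass
   (void *)(size_t)(flag + (int)looping * 2) and the workers test
   DATA != NULL and (size_t)DATA & 2.  The helper below is hypothetical
   and only demonstrates the round trip.  */
#if 0
static void
example_show_flag_encoding (bool flag, bool looping)
{
  void *data = (void *)(size_t)(flag + (int)looping * 2);

  /* DATA is non-NULL whenever either bit is set; in practice the setters
     only pass LOOPING together with the flag itself.  */
  bool decoded_flag = data != NULL;
  bool decoded_looping = ((size_t) data & 2) != 0;

  gcc_assert (decoded_flag == (flag || looping)
	      && decoded_looping == looping);
}
#endif
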
2373 /* Return true when NODE cannot return or throw and thus
2374 it is safe to ignore its side effects for IPA analysis. */
2375
2376 bool
2377 cgraph_node_cannot_return (struct cgraph_node *node)
2378 {
2379 int flags = flags_from_decl_or_type (node->decl);
2380 if (!flag_exceptions)
2381 return (flags & ECF_NORETURN) != 0;
2382 else
2383 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2384 == (ECF_NORETURN | ECF_NOTHROW));
2385 }
2386
2387 /* Return true when a call along edge E cannot lead to a return from
2388 the caller and thus it is safe to ignore its side effects for IPA
2389 analysis when computing side effects of the caller.
2390 FIXME: We could actually mark all edges that have no reaching
2391 path to the exit block or throw to get better results. */
2392 bool
2393 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2394 {
2395 if (cgraph_node_cannot_return (e->caller))
2396 return true;
2397 if (e->indirect_unknown_callee)
2398 {
2399 int flags = e->indirect_info->ecf_flags;
2400 if (!flag_exceptions)
2401 return (flags & ECF_NORETURN) != 0;
2402 else
2403 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2404 == (ECF_NORETURN | ECF_NOTHROW));
2405 }
2406 else
2407 return cgraph_node_cannot_return (e->callee);
2408 }
2409
2410 /* Return true when function NODE can be removed from callgraph
2411 if all direct calls are eliminated. */
2412
2413 bool
2414 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2415 {
2416 gcc_assert (!node->global.inlined_to);
2417 /* Extern inlines can always go, we will use the external definition. */
2418 if (DECL_EXTERNAL (node->decl))
2419 return true;
2420 /* When the function is needed, we cannot remove it. */
2421 if (node->force_output || node->used_from_other_partition)
2422 return false;
2423 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2424 || DECL_STATIC_DESTRUCTOR (node->decl))
2425 return false;
2426 /* Only COMDAT functions can be removed if externally visible. */
2427 if (node->externally_visible
2428 && (!DECL_COMDAT (node->decl)
2429 || node->forced_by_abi
2430 || symtab_used_from_object_file_p (node)))
2431 return false;
2432 return true;
2433 }
2434
2435 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2436
2437 static bool
2438 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2439 {
2440 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2441 }
2442
2443 /* Return true when function NODE and its aliases can be removed from callgraph
2444 if all direct calls are eliminated. */
2445
2446 bool
2447 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2448 {
2449 /* Extern inlines can always go, we will use the external definition. */
2450 if (DECL_EXTERNAL (node->decl))
2451 return true;
2452 if (node->address_taken)
2453 return false;
2454 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2455 }
2456
2457 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2458
2459 static bool
2460 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2461 {
2462 return symtab_used_from_object_file_p (node);
2463 }
2464
2465 /* Return true when function NODE can be expected to be removed
2466 from the program when direct calls in this compilation unit are removed.
2467 
2468 As a special case, COMDAT functions are
2469 cgraph_can_remove_if_no_direct_calls_p while they are not
2470 cgraph_only_called_directly_p (it is possible they are called from
2471 another unit).
2472 
2473 This function behaves as cgraph_only_called_directly_p because eliminating
2474 all uses of a COMDAT function does not necessarily make it disappear from
2475 the program unless we are compiling the whole program or we do LTO. In
2476 this case we know we win, since dynamic linking will not really discard
2477 the linkonce section. */
2478
2479 bool
2480 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2481 {
2482 gcc_assert (!node->global.inlined_to);
2483 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2484 return false;
2485 if (!in_lto_p && !flag_whole_program)
2486 return cgraph_only_called_directly_p (node);
2487 else
2488 {
2489 if (DECL_EXTERNAL (node->decl))
2490 return true;
2491 return cgraph_can_remove_if_no_direct_calls_p (node);
2492 }
2493 }
2494
2495
2496 /* Worker for cgraph_only_called_directly_p. */
2497
2498 static bool
2499 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2500 {
2501 return !cgraph_only_called_directly_or_aliased_p (node);
2502 }
2503
2504 /* Return true when function NODE and all its aliases are only called
2505 directly.
2506 I.e. it is not externally visible, its address is not taken and
2507 it is not used in any other non-standard way. */
2508
2509 bool
2510 cgraph_only_called_directly_p (struct cgraph_node *node)
2511 {
2512 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
2513 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
2514 NULL, true);
2515 }
2516
2517
2518 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2519
2520 static bool
2521 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
2522 {
2523 vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
2524 struct cgraph_edge *cs;
2525 enum availability avail;
2526 cgraph_function_or_thunk_node (node, &avail);
2527
2528 if (avail > AVAIL_OVERWRITABLE)
2529 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2530 if (!cs->indirect_inlining_edge)
2531 redirect_callers->safe_push (cs);
2532 return false;
2533 }
2534
2535 /* Collect all callers of NODE and its aliases that are known to lead to NODE
2536 (i.e. are not overwritable). */
2537
2538 vec<cgraph_edge_p>
2539 collect_callers_of_node (struct cgraph_node *node)
2540 {
2541 vec<cgraph_edge_p> redirect_callers = vNULL;
2542 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
2543 &redirect_callers, false);
2544 return redirect_callers;
2545 }
2546
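/* Illustrative sketch (not part of GCC, kept under "#if 0"): consuming
   the vector returned by collect_callers_of_node.  The dumping helper is
   hypothetical, but it only uses the vec API and the edge fields already
   used elsewhere in this file; the caller owns the vector and is expected
   to release it.  */
#if 0
static void
example_dump_known_callers (FILE *f, struct cgraph_node *node)
{
  vec<cgraph_edge_p> callers = collect_callers_of_node (node);
  struct cgraph_edge *cs;
  unsigned i;

  FOR_EACH_VEC_ELT (callers, i, cs)
    fprintf (f, "  known caller: %s/%i\n",
	     cs->caller->asm_name (), cs->caller->order);

  /* The vector owns heap storage; drop it when done.  */
  callers.release ();
}
#endif
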
2547 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
2548
2549 static bool
2550 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
2551 {
2552 bool skipped_thunk = false;
2553 node = cgraph_function_or_thunk_node (node, NULL);
2554 node2 = cgraph_function_or_thunk_node (node2, NULL);
2555
2556 /* There are no virtual clones of thunks so check former_clone_of or if we
2557 might have skipped thunks because these adjustments are no longer
2558 necessary. */
2559 while (node->thunk.thunk_p)
2560 {
2561 if (node2->former_clone_of == node->decl)
2562 return true;
2563 if (!node->thunk.this_adjusting)
2564 return false;
2565 node = cgraph_function_or_thunk_node (node->callees->callee, NULL);
2566 skipped_thunk = true;
2567 }
2568
2569 if (skipped_thunk
2570 && (!node2->clone_of
2571 || !node2->clone.args_to_skip
2572 || !bitmap_bit_p (node2->clone.args_to_skip, 0)))
2573 return false;
2574
2575 while (node != node2 && node2)
2576 node2 = node2->clone_of;
2577 return node2 != NULL;
2578 }
2579
2580 /* Verify edge E count and frequency. */
2581
2582 static bool
2583 verify_edge_count_and_frequency (struct cgraph_edge *e)
2584 {
2585 bool error_found = false;
2586 if (e->count < 0)
2587 {
2588 error ("caller edge count is negative");
2589 error_found = true;
2590 }
2591 if (e->frequency < 0)
2592 {
2593 error ("caller edge frequency is negative");
2594 error_found = true;
2595 }
2596 if (e->frequency > CGRAPH_FREQ_MAX)
2597 {
2598 error ("caller edge frequency is too large");
2599 error_found = true;
2600 }
2601 if (gimple_has_body_p (e->caller->decl)
2602 && !e->caller->global.inlined_to
2603 && !e->speculative
2604 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
2605 Remove this once edges are actually removed from the function at that time. */
2606 && (e->frequency
2607 || (inline_edge_summary_vec.exists ()
2608 && ((inline_edge_summary_vec.length () <= (unsigned) e->uid)
2609 || !inline_edge_summary (e)->predicate)))
2610 && (e->frequency
2611 != compute_call_stmt_bb_frequency (e->caller->decl,
2612 gimple_bb (e->call_stmt))))
2613 {
2614 error ("caller edge frequency %i does not match BB frequency %i",
2615 e->frequency,
2616 compute_call_stmt_bb_frequency (e->caller->decl,
2617 gimple_bb (e->call_stmt)));
2618 error_found = true;
2619 }
2620 return error_found;
2621 }
2622
2623 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2624 static void
2625 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
2626 {
2627 bool fndecl_was_null = false;
2628 /* debug_gimple_stmt needs correct cfun */
2629 if (cfun != this_cfun)
2630 set_cfun (this_cfun);
2631 /* ...and an actual current_function_decl */
2632 if (!current_function_decl)
2633 {
2634 current_function_decl = this_cfun->decl;
2635 fndecl_was_null = true;
2636 }
2637 debug_gimple_stmt (stmt);
2638 if (fndecl_was_null)
2639 current_function_decl = NULL;
2640 }
2641
2642 /* Verify that call graph edge E corresponds to DECL from the associated
2643 statement. Return true if the verification should fail. */
2644
2645 static bool
2646 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
2647 {
2648 struct cgraph_node *node;
2649
2650 if (!decl || e->callee->global.inlined_to)
2651 return false;
2652 if (cgraph_state == CGRAPH_LTO_STREAMING)
2653 return false;
2654 node = cgraph_get_node (decl);
2655
2656 /* We do not know if a node from a different partition is an alias or what it
2657 aliases and therefore cannot do the former_clone_of check reliably. When
2658 body_removed is set, we have lost all information about what the node
2659 was an alias or thunk of and also cannot proceed. */
2660 if (!node
2661 || node->body_removed
2662 || node->in_other_partition
2663 || e->callee->in_other_partition)
2664 return false;
2665
2666 /* Optimizers can redirect unreachable calls or calls triggering undefined
2667 behaviour to builtin_unreachable. */
2668 if (DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
2669 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
2670 return false;
2671 node = cgraph_function_or_thunk_node (node, NULL);
2672
2673 if (e->callee->former_clone_of != node->decl
2674 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
2675 && !clone_of_p (node, e->callee))
2676 return true;
2677 else
2678 return false;
2679 }
2680
2681 /* Verify the consistency of cgraph node NODE. */
2682 DEBUG_FUNCTION void
2683 verify_cgraph_node (struct cgraph_node *node)
2684 {
2685 struct cgraph_edge *e;
2686 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
2687 basic_block this_block;
2688 gimple_stmt_iterator gsi;
2689 bool error_found = false;
2690
2691 if (seen_error ())
2692 return;
2693
2694 timevar_push (TV_CGRAPH_VERIFY);
2695 error_found |= verify_symtab_base (node);
2696 for (e = node->callees; e; e = e->next_callee)
2697 if (e->aux)
2698 {
2699 error ("aux field set for edge %s->%s",
2700 identifier_to_locale (e->caller->name ()),
2701 identifier_to_locale (e->callee->name ()));
2702 error_found = true;
2703 }
2704 if (node->count < 0)
2705 {
2706 error ("execution count is negative");
2707 error_found = true;
2708 }
2709 if (node->global.inlined_to && node->same_comdat_group)
2710 {
2711 error ("inline clone in same comdat group list");
2712 error_found = true;
2713 }
2714 if (!node->definition && !node->in_other_partition && node->local.local)
2715 {
2716 error ("local symbols must be defined");
2717 error_found = true;
2718 }
2719 if (node->global.inlined_to && node->externally_visible)
2720 {
2721 error ("externally visible inline clone");
2722 error_found = true;
2723 }
2724 if (node->global.inlined_to && node->address_taken)
2725 {
2726 error ("inline clone with address taken");
2727 error_found = true;
2728 }
2729 if (node->global.inlined_to && node->force_output)
2730 {
2731 error ("inline clone is forced to output");
2732 error_found = true;
2733 }
2734 for (e = node->indirect_calls; e; e = e->next_callee)
2735 {
2736 if (e->aux)
2737 {
2738 error ("aux field set for indirect edge from %s",
2739 identifier_to_locale (e->caller->name ()));
2740 error_found = true;
2741 }
2742 if (!e->indirect_unknown_callee
2743 || !e->indirect_info)
2744 {
2745 error ("An indirect edge from %s is not marked as indirect or has "
2746 "associated indirect_info, the corresponding statement is: ",
2747 identifier_to_locale (e->caller->name ()));
2748 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2749 error_found = true;
2750 }
2751 }
2752 bool check_comdat = symtab_comdat_local_p (node);
2753 for (e = node->callers; e; e = e->next_caller)
2754 {
2755 if (verify_edge_count_and_frequency (e))
2756 error_found = true;
2757 if (check_comdat
2758 && !symtab_in_same_comdat_p (e->caller, node))
2759 {
2760 error ("comdat-local function called by %s outside its comdat",
2761 identifier_to_locale (e->caller->name ()));
2762 error_found = true;
2763 }
2764 if (!e->inline_failed)
2765 {
2766 if (node->global.inlined_to
2767 != (e->caller->global.inlined_to
2768 ? e->caller->global.inlined_to : e->caller))
2769 {
2770 error ("inlined_to pointer is wrong");
2771 error_found = true;
2772 }
2773 if (node->callers->next_caller)
2774 {
2775 error ("multiple inline callers");
2776 error_found = true;
2777 }
2778 }
2779 else
2780 if (node->global.inlined_to)
2781 {
2782 error ("inlined_to pointer set for noninline callers");
2783 error_found = true;
2784 }
2785 }
2786 for (e = node->indirect_calls; e; e = e->next_callee)
2787 if (verify_edge_count_and_frequency (e))
2788 error_found = true;
2789 if (!node->callers && node->global.inlined_to)
2790 {
2791 error ("inlined_to pointer is set but no predecessors found");
2792 error_found = true;
2793 }
2794 if (node->global.inlined_to == node)
2795 {
2796 error ("inlined_to pointer refers to itself");
2797 error_found = true;
2798 }
2799
2800 if (node->clone_of)
2801 {
2802 struct cgraph_node *n;
2803 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
2804 if (n == node)
2805 break;
2806 if (!n)
2807 {
2808 error ("node has wrong clone_of");
2809 error_found = true;
2810 }
2811 }
2812 if (node->clones)
2813 {
2814 struct cgraph_node *n;
2815 for (n = node->clones; n; n = n->next_sibling_clone)
2816 if (n->clone_of != node)
2817 break;
2818 if (n)
2819 {
2820 error ("node has wrong clone list");
2821 error_found = true;
2822 }
2823 }
2824 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
2825 {
2826 error ("node is in clone list but it is not clone");
2827 error_found = true;
2828 }
2829 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
2830 {
2831 error ("node has wrong prev_clone pointer");
2832 error_found = true;
2833 }
2834 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
2835 {
2836 error ("double linked list of clones corrupted");
2837 error_found = true;
2838 }
2839
2840 if (node->analyzed && node->alias)
2841 {
2842 bool ref_found = false;
2843 int i;
2844 struct ipa_ref *ref;
2845
2846 if (node->callees)
2847 {
2848 error ("Alias has call edges");
2849 error_found = true;
2850 }
2851 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list,
2852 i, ref); i++)
2853 if (ref->use != IPA_REF_ALIAS)
2854 {
2855 error ("Alias has non-alias reference");
2856 error_found = true;
2857 }
2858 else if (ref_found)
2859 {
2860 error ("Alias has more than one alias reference");
2861 error_found = true;
2862 }
2863 else
2864 ref_found = true;
2865 if (!ref_found)
2866 {
2867 error ("Analyzed alias has no reference");
2868 error_found = true;
2869 }
2870 }
2871 if (node->analyzed && node->thunk.thunk_p)
2872 {
2873 if (!node->callees)
2874 {
2875 error ("No edge out of thunk node");
2876 error_found = true;
2877 }
2878 else if (node->callees->next_callee)
2879 {
2880 error ("More than one edge out of thunk node");
2881 error_found = true;
2882 }
2883 if (gimple_has_body_p (node->decl))
2884 {
2885 error ("Thunk is not supposed to have body");
2886 error_found = true;
2887 }
2888 }
2889 else if (node->analyzed && gimple_has_body_p (node->decl)
2890 && !TREE_ASM_WRITTEN (node->decl)
2891 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
2892 && !flag_wpa)
2893 {
2894 if (this_cfun->cfg)
2895 {
2896 pointer_set_t *stmts = pointer_set_create ();
2897 int i;
2898 struct ipa_ref *ref;
2899
2900 /* Reach the trees by walking over the CFG, and note the
2901 enclosing basic-blocks in the call edges. */
2902 FOR_EACH_BB_FN (this_block, this_cfun)
2903 {
2904 for (gsi = gsi_start_phis (this_block);
2905 !gsi_end_p (gsi); gsi_next (&gsi))
2906 pointer_set_insert (stmts, gsi_stmt (gsi));
2907 for (gsi = gsi_start_bb (this_block);
2908 !gsi_end_p (gsi);
2909 gsi_next (&gsi))
2910 {
2911 gimple stmt = gsi_stmt (gsi);
2912 pointer_set_insert (stmts, stmt);
2913 if (is_gimple_call (stmt))
2914 {
2915 struct cgraph_edge *e = cgraph_edge (node, stmt);
2916 tree decl = gimple_call_fndecl (stmt);
2917 if (e)
2918 {
2919 if (e->aux)
2920 {
2921 error ("shared call_stmt:");
2922 cgraph_debug_gimple_stmt (this_cfun, stmt);
2923 error_found = true;
2924 }
2925 if (!e->indirect_unknown_callee)
2926 {
2927 if (verify_edge_corresponds_to_fndecl (e, decl))
2928 {
2929 error ("edge points to wrong declaration:");
2930 debug_tree (e->callee->decl);
2931 fprintf (stderr," Instead of:");
2932 debug_tree (decl);
2933 error_found = true;
2934 }
2935 }
2936 else if (decl)
2937 {
2938 error ("an indirect edge with unknown callee "
2939 "corresponding to a call_stmt with "
2940 "a known declaration:");
2941 error_found = true;
2942 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2943 }
2944 e->aux = (void *)1;
2945 }
2946 else if (decl)
2947 {
2948 error ("missing callgraph edge for call stmt:");
2949 cgraph_debug_gimple_stmt (this_cfun, stmt);
2950 error_found = true;
2951 }
2952 }
2953 }
2954 }
2955 for (i = 0;
2956 ipa_ref_list_reference_iterate (&node->ref_list, i, ref);
2957 i++)
2958 if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
2959 {
2960 error ("reference to dead statement");
2961 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
2962 error_found = true;
2963 }
2964 pointer_set_destroy (stmts);
2965 }
2966 else
2967 /* No CFG available?! */
2968 gcc_unreachable ();
2969
2970 for (e = node->callees; e; e = e->next_callee)
2971 {
2972 if (!e->aux)
2973 {
2974 error ("edge %s->%s has no corresponding call_stmt",
2975 identifier_to_locale (e->caller->name ()),
2976 identifier_to_locale (e->callee->name ()));
2977 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2978 error_found = true;
2979 }
2980 e->aux = 0;
2981 }
2982 for (e = node->indirect_calls; e; e = e->next_callee)
2983 {
2984 if (!e->aux && !e->speculative)
2985 {
2986 error ("an indirect edge from %s has no corresponding call_stmt",
2987 identifier_to_locale (e->caller->name ()));
2988 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2989 error_found = true;
2990 }
2991 e->aux = 0;
2992 }
2993 }
2994 if (error_found)
2995 {
2996 dump_cgraph_node (stderr, node);
2997 internal_error ("verify_cgraph_node failed");
2998 }
2999 timevar_pop (TV_CGRAPH_VERIFY);
3000 }
3001
3002 /* Verify whole cgraph structure. */
3003 DEBUG_FUNCTION void
3004 verify_cgraph (void)
3005 {
3006 struct cgraph_node *node;
3007
3008 if (seen_error ())
3009 return;
3010
3011 FOR_EACH_FUNCTION (node)
3012 verify_cgraph_node (node);
3013 }
3014
3015 /* Given NODE, walk the alias chain to return the function NODE is an alias of.
3016 Walk through thunks, too.
3017 When AVAILABILITY is non-NULL, get the minimal availability in the chain. */
3018
3019 struct cgraph_node *
3020 cgraph_function_node (struct cgraph_node *node, enum availability *availability)
3021 {
3022 do
3023 {
3024 node = cgraph_function_or_thunk_node (node, availability);
3025 if (node->thunk.thunk_p)
3026 {
3027 node = node->callees->callee;
3028 if (availability)
3029 {
3030 enum availability a;
3031 a = cgraph_function_body_availability (node);
3032 if (a < *availability)
3033 *availability = a;
3034 }
3035 node = cgraph_function_or_thunk_node (node, availability);
3036 }
3037 } while (node && node->thunk.thunk_p);
3038 return node;
3039 }
3040
3041 /* When doing LTO, read NODE's body from disk if it is not already present. */
3042
3043 bool
3044 cgraph_get_body (struct cgraph_node *node)
3045 {
3046 struct lto_file_decl_data *file_data;
3047 const char *data, *name;
3048 size_t len;
3049 tree decl = node->decl;
3050
3051 if (DECL_RESULT (decl))
3052 return false;
3053
3054 gcc_assert (in_lto_p);
3055
3056 file_data = node->lto_file_data;
3057 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3058
3059 /* We may have renamed the declaration, e.g., a static function. */
3060 name = lto_get_decl_name_mapping (file_data, name);
3061
3062 data = lto_get_section_data (file_data, LTO_section_function_body,
3063 name, &len);
3064 if (!data)
3065 {
3066 dump_cgraph_node (stderr, node);
3067 fatal_error ("%s: section %s is missing",
3068 file_data->file_name,
3069 name);
3070 }
3071
3072 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3073
3074 lto_input_function_body (file_data, node, data);
3075 lto_stats.num_function_bodies++;
3076 lto_free_section_data (file_data, LTO_section_function_body, name,
3077 data, len);
3078 lto_free_function_in_decl_state_for_node (node);
3079 return true;
3080 }
3081
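/* Illustrative sketch (not part of GCC, kept under "#if 0"): how LTO-aware
   code typically guards access to a gimple body, materializing it on
   demand with cgraph_get_body.  The wrapper example_ensure_body is
   hypothetical; gimple_has_body_p, in_lto_p and lto_file_data are
   existing interfaces used elsewhere in this file.  */
#if 0
static bool
example_ensure_body (struct cgraph_node *node)
{
  /* Body already present: nothing to do.  */
  if (gimple_has_body_p (node->decl))
    return true;

  /* Outside of LTO there is no stream to read the body from.  */
  if (!in_lto_p || !node->lto_file_data)
    return false;

  return cgraph_get_body (node);
}
#endif
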
3082 /* Verify whether the types of the arguments match those of the function
3083 declaration. If we cannot verify this or there is a mismatch,
3084 return false. */
3085
3086 static bool
3087 gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
3088 {
3089 tree parms, p;
3090 unsigned int i, nargs;
3091
3092 /* Calls to internal functions always match their signature. */
3093 if (gimple_call_internal_p (stmt))
3094 return true;
3095
3096 nargs = gimple_call_num_args (stmt);
3097
3098 /* Get argument types for verification. */
3099 if (fndecl)
3100 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3101 else
3102 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3103
3104 /* Verify if the type of the argument matches that of the function
3105 declaration. If we cannot verify this or there is a mismatch,
3106 return false. */
3107 if (fndecl && DECL_ARGUMENTS (fndecl))
3108 {
3109 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3110 i < nargs;
3111 i++, p = DECL_CHAIN (p))
3112 {
3113 tree arg;
3114 /* We cannot distinguish a varargs function from the case
3115 of excess parameters; still, deferring the inlining decision
3116 to the callee is possible. */
3117 if (!p)
3118 break;
3119 arg = gimple_call_arg (stmt, i);
3120 if (p == error_mark_node
3121 || DECL_ARG_TYPE (p) == error_mark_node
3122 || arg == error_mark_node
3123 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3124 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3125 return false;
3126 }
3127 if (args_count_match && p)
3128 return false;
3129 }
3130 else if (parms)
3131 {
3132 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3133 {
3134 tree arg;
3135 /* If this is a varargs function, defer the inlining decision
3136 to the callee. */
3137 if (!p)
3138 break;
3139 arg = gimple_call_arg (stmt, i);
3140 if (TREE_VALUE (p) == error_mark_node
3141 || arg == error_mark_node
3142 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3143 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3144 && !fold_convertible_p (TREE_VALUE (p), arg)))
3145 return false;
3146 }
3147 }
3148 else
3149 {
3150 if (nargs != 0)
3151 return false;
3152 }
3153 return true;
3154 }
3155
3156 /* Verify if the type of the argument and lhs of CALL_STMT matches
3157 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3158 true, the arg count needs to be the same.
3159 If we cannot verify this or there is a mismatch, return false. */
3160
3161 bool
3162 gimple_check_call_matching_types (gimple call_stmt, tree callee,
3163 bool args_count_match)
3164 {
3165 tree lhs;
3166
3167 if ((DECL_RESULT (callee)
3168 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3169 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3170 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3171 TREE_TYPE (lhs))
3172 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3173 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3174 return false;
3175 return true;
3176 }
3177
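/* Illustrative sketch (not part of GCC, kept under "#if 0"): the typical
   use of gimple_check_call_matching_types once an optimizer has found a
   concrete callee FNDECL for call statement STMT and wants to know
   whether the call may be treated as a type-correct direct call.  The
   wrapper and its parameters are hypothetical.  */
#if 0
static bool
example_call_matches_callee_p (gimple stmt, tree fndecl)
{
  /* Require compatible return and argument types, but tolerate a
     trailing argument-count mismatch (e.g. for varargs callees).  */
  return gimple_check_call_matching_types (stmt, fndecl,
					   /*args_count_match=*/false);
}
#endif
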
3178 #include "gt-cgraph.h"