1 /* Callgraph handling code.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
 21 /* This file contains basic routines for manipulating the call graph.
 22
 23 The call graph is a data structure designed for inter-procedural optimization.
 24 It represents a multi-graph where nodes are functions and edges are call sites. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "varasm.h"
32 #include "calls.h"
33 #include "print-tree.h"
34 #include "tree-inline.h"
35 #include "langhooks.h"
36 #include "hashtab.h"
37 #include "toplev.h"
38 #include "flags.h"
39 #include "debug.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "timevar.h"
50 #include "dumpfile.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssa.h"
55 #include "value-prof.h"
56 #include "except.h"
57 #include "diagnostic-core.h"
58 #include "rtl.h"
59 #include "ipa-utils.h"
60 #include "lto-streamer.h"
61 #include "ipa-inline.h"
62 #include "cfgloop.h"
63 #include "gimple-pretty-print.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 static void cgraph_node_remove_callers (struct cgraph_node *node);
71 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
72 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
73
74 /* Queue of cgraph nodes scheduled to be lowered. */
75 symtab_node *x_cgraph_nodes_queue;
76 #define cgraph_nodes_queue ((struct cgraph_node *)x_cgraph_nodes_queue)
77
78 /* Number of nodes in existence. */
79 int cgraph_n_nodes;
80
81 /* Maximal uid used in cgraph nodes. */
82 int cgraph_max_uid;
83
84 /* Maximal uid used in cgraph edges. */
85 int cgraph_edge_max_uid;
86
 87 /* Set when the whole unit has been analyzed so we can access global info. */
88 bool cgraph_global_info_ready = false;
89
90 /* What state callgraph is in right now. */
91 enum cgraph_state cgraph_state = CGRAPH_STATE_PARSING;
92
 93 /* Set when the cgraph is fully built and the basic flags are computed. */
94 bool cgraph_function_flags_ready = false;
95
96 /* List of hooks triggered on cgraph_edge events. */
97 struct cgraph_edge_hook_list {
98 cgraph_edge_hook hook;
99 void *data;
100 struct cgraph_edge_hook_list *next;
101 };
102
103 /* List of hooks triggered on cgraph_node events. */
104 struct cgraph_node_hook_list {
105 cgraph_node_hook hook;
106 void *data;
107 struct cgraph_node_hook_list *next;
108 };
109
110 /* List of hooks triggered on events involving two cgraph_edges. */
111 struct cgraph_2edge_hook_list {
112 cgraph_2edge_hook hook;
113 void *data;
114 struct cgraph_2edge_hook_list *next;
115 };
116
117 /* List of hooks triggered on events involving two cgraph_nodes. */
118 struct cgraph_2node_hook_list {
119 cgraph_2node_hook hook;
120 void *data;
121 struct cgraph_2node_hook_list *next;
122 };
123
124 /* List of hooks triggered when an edge is removed. */
125 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
126 /* List of hooks triggered when a node is removed. */
127 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
128 /* List of hooks triggered when an edge is duplicated. */
129 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
130 /* List of hooks triggered when a node is duplicated. */
131 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
 132 /* List of hooks triggered when a function is inserted. */
133 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
134
135 /* Head of a linked list of unused (freed) call graph nodes.
 136 Do not GTY((delete)) this list so UIDs get reliably recycled. */
137 static GTY(()) struct cgraph_node *free_nodes;
138 /* Head of a linked list of unused (freed) call graph edges.
 139 Do not GTY((delete)) this list so UIDs get reliably recycled. */
140 static GTY(()) struct cgraph_edge *free_edges;
141
 142 /* Did process_same_body_aliases run? */
143 bool cpp_implicit_aliases_done;
144
145 /* Map a cgraph_node to cgraph_function_version_info using this htab.
146 The cgraph_function_version_info has a THIS_NODE field that is the
 147 corresponding cgraph_node. */
148
149 static GTY((param_is (struct cgraph_function_version_info))) htab_t
150 cgraph_fnver_htab = NULL;
151
152 /* Hash function for cgraph_fnver_htab. */
153 static hashval_t
154 cgraph_fnver_htab_hash (const void *ptr)
155 {
156 int uid = ((const struct cgraph_function_version_info *)ptr)->this_node->uid;
157 return (hashval_t)(uid);
158 }
159
160 /* eq function for cgraph_fnver_htab. */
161 static int
162 cgraph_fnver_htab_eq (const void *p1, const void *p2)
163 {
164 const struct cgraph_function_version_info *n1
165 = (const struct cgraph_function_version_info *)p1;
166 const struct cgraph_function_version_info *n2
167 = (const struct cgraph_function_version_info *)p2;
168
169 return n1->this_node->uid == n2->this_node->uid;
170 }
171
172 /* Mark as GC root all allocated nodes. */
173 static GTY(()) struct cgraph_function_version_info *
174 version_info_node = NULL;
175
176 /* Get the cgraph_function_version_info node corresponding to node. */
177 struct cgraph_function_version_info *
178 get_cgraph_node_version (struct cgraph_node *node)
179 {
180 struct cgraph_function_version_info *ret;
181 struct cgraph_function_version_info key;
182 key.this_node = node;
183
184 if (cgraph_fnver_htab == NULL)
185 return NULL;
186
187 ret = (struct cgraph_function_version_info *)
188 htab_find (cgraph_fnver_htab, &key);
189
190 return ret;
191 }
192
193 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
194 corresponding to cgraph_node NODE. */
195 struct cgraph_function_version_info *
196 insert_new_cgraph_node_version (struct cgraph_node *node)
197 {
198 void **slot;
199
200 version_info_node = NULL;
201 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
202 version_info_node->this_node = node;
203
204 if (cgraph_fnver_htab == NULL)
205 cgraph_fnver_htab = htab_create_ggc (2, cgraph_fnver_htab_hash,
206 cgraph_fnver_htab_eq, NULL);
207
208 slot = htab_find_slot (cgraph_fnver_htab, version_info_node, INSERT);
209 gcc_assert (slot != NULL);
210 *slot = version_info_node;
211 return version_info_node;
212 }
213
214 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
215 DECL is a duplicate declaration. */
216 void
217 delete_function_version (tree decl)
218 {
219 struct cgraph_node *decl_node = cgraph_get_node (decl);
220 struct cgraph_function_version_info *decl_v = NULL;
221
222 if (decl_node == NULL)
223 return;
224
225 decl_v = get_cgraph_node_version (decl_node);
226
227 if (decl_v == NULL)
228 return;
229
230 if (decl_v->prev != NULL)
231 decl_v->prev->next = decl_v->next;
232
233 if (decl_v->next != NULL)
234 decl_v->next->prev = decl_v->prev;
235
236 if (cgraph_fnver_htab != NULL)
237 htab_remove_elt (cgraph_fnver_htab, decl_v);
238
239 cgraph_remove_node (decl_node);
240 }
241
242 /* Record that DECL1 and DECL2 are semantically identical function
243 versions. */
244 void
245 record_function_versions (tree decl1, tree decl2)
246 {
247 struct cgraph_node *decl1_node = cgraph_get_create_node (decl1);
248 struct cgraph_node *decl2_node = cgraph_get_create_node (decl2);
249 struct cgraph_function_version_info *decl1_v = NULL;
250 struct cgraph_function_version_info *decl2_v = NULL;
251 struct cgraph_function_version_info *before;
252 struct cgraph_function_version_info *after;
253
254 gcc_assert (decl1_node != NULL && decl2_node != NULL);
255 decl1_v = get_cgraph_node_version (decl1_node);
256 decl2_v = get_cgraph_node_version (decl2_node);
257
258 if (decl1_v != NULL && decl2_v != NULL)
259 return;
260
261 if (decl1_v == NULL)
262 decl1_v = insert_new_cgraph_node_version (decl1_node);
263
264 if (decl2_v == NULL)
265 decl2_v = insert_new_cgraph_node_version (decl2_node);
266
267 /* Chain decl2_v and decl1_v. All semantically identical versions
268 will be chained together. */
269
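  /* Walk to the tail of DECL1's chain and to the head of DECL2's chain,
     then splice the two chains together.  */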
270 before = decl1_v;
271 after = decl2_v;
272
273 while (before->next != NULL)
274 before = before->next;
275
276 while (after->prev != NULL)
 277 after = after->prev;
278
279 before->next = after;
280 after->prev = before;
281 }
282
283 /* Macros to access the next item in the list of free cgraph nodes and
284 edges. */
285 #define NEXT_FREE_NODE(NODE) cgraph ((NODE)->next)
286 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
287 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
288
289 /* Register HOOK to be called with DATA on each removed edge. */
290 struct cgraph_edge_hook_list *
291 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
292 {
293 struct cgraph_edge_hook_list *entry;
294 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
295
296 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
297 entry->hook = hook;
298 entry->data = data;
299 entry->next = NULL;
300 while (*ptr)
301 ptr = &(*ptr)->next;
302 *ptr = entry;
303 return entry;
304 }
305
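/* A minimal usage sketch (hypothetical client code, not part of this file):
   a pass that caches per-edge data registers a removal hook so stale entries
   are dropped when an edge dies, and unregisters it on teardown:

     static void
     my_edge_removal (struct cgraph_edge *e, void *data)
     {
       ... forget whatever was cached for E ...
     }

     struct cgraph_edge_hook_list *holder
       = cgraph_add_edge_removal_hook (my_edge_removal, NULL);
     ...
     cgraph_remove_edge_removal_hook (holder);  */
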
306 /* Remove ENTRY from the list of hooks called on removing edges. */
307 void
308 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
309 {
310 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
311
312 while (*ptr != entry)
313 ptr = &(*ptr)->next;
314 *ptr = entry->next;
315 free (entry);
316 }
317
318 /* Call all edge removal hooks. */
319 static void
320 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
321 {
322 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
323 while (entry)
324 {
325 entry->hook (e, entry->data);
326 entry = entry->next;
327 }
328 }
329
330 /* Register HOOK to be called with DATA on each removed node. */
331 struct cgraph_node_hook_list *
332 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
333 {
334 struct cgraph_node_hook_list *entry;
335 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
336
337 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
338 entry->hook = hook;
339 entry->data = data;
340 entry->next = NULL;
341 while (*ptr)
342 ptr = &(*ptr)->next;
343 *ptr = entry;
344 return entry;
345 }
346
347 /* Remove ENTRY from the list of hooks called on removing nodes. */
348 void
349 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
350 {
351 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
352
353 while (*ptr != entry)
354 ptr = &(*ptr)->next;
355 *ptr = entry->next;
356 free (entry);
357 }
358
359 /* Call all node removal hooks. */
360 static void
361 cgraph_call_node_removal_hooks (struct cgraph_node *node)
362 {
363 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
364 while (entry)
365 {
366 entry->hook (node, entry->data);
367 entry = entry->next;
368 }
369 }
370
371 /* Register HOOK to be called with DATA on each inserted node. */
372 struct cgraph_node_hook_list *
373 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
374 {
375 struct cgraph_node_hook_list *entry;
376 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
377
378 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
379 entry->hook = hook;
380 entry->data = data;
381 entry->next = NULL;
382 while (*ptr)
383 ptr = &(*ptr)->next;
384 *ptr = entry;
385 return entry;
386 }
387
388 /* Remove ENTRY from the list of hooks called on inserted nodes. */
389 void
390 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
391 {
392 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
393
394 while (*ptr != entry)
395 ptr = &(*ptr)->next;
396 *ptr = entry->next;
397 free (entry);
398 }
399
400 /* Call all node insertion hooks. */
401 void
402 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
403 {
404 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
405 while (entry)
406 {
407 entry->hook (node, entry->data);
408 entry = entry->next;
409 }
410 }
411
412 /* Register HOOK to be called with DATA on each duplicated edge. */
413 struct cgraph_2edge_hook_list *
414 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
415 {
416 struct cgraph_2edge_hook_list *entry;
417 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
418
419 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
420 entry->hook = hook;
421 entry->data = data;
422 entry->next = NULL;
423 while (*ptr)
424 ptr = &(*ptr)->next;
425 *ptr = entry;
426 return entry;
427 }
428
429 /* Remove ENTRY from the list of hooks called on duplicating edges. */
430 void
431 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
432 {
433 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
434
435 while (*ptr != entry)
436 ptr = &(*ptr)->next;
437 *ptr = entry->next;
438 free (entry);
439 }
440
441 /* Call all edge duplication hooks. */
442 void
443 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
444 struct cgraph_edge *cs2)
445 {
446 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
447 while (entry)
448 {
449 entry->hook (cs1, cs2, entry->data);
450 entry = entry->next;
451 }
452 }
453
454 /* Register HOOK to be called with DATA on each duplicated node. */
455 struct cgraph_2node_hook_list *
456 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
457 {
458 struct cgraph_2node_hook_list *entry;
459 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
460
461 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
462 entry->hook = hook;
463 entry->data = data;
464 entry->next = NULL;
465 while (*ptr)
466 ptr = &(*ptr)->next;
467 *ptr = entry;
468 return entry;
469 }
470
471 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
472 void
473 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
474 {
475 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
476
477 while (*ptr != entry)
478 ptr = &(*ptr)->next;
479 *ptr = entry->next;
480 free (entry);
481 }
482
483 /* Call all node duplication hooks. */
484 void
485 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
486 struct cgraph_node *node2)
487 {
488 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
489 while (entry)
490 {
491 entry->hook (node1, node2, entry->data);
492 entry = entry->next;
493 }
494 }
495
496 /* Allocate new callgraph node. */
497
498 static inline struct cgraph_node *
499 cgraph_allocate_node (void)
500 {
501 struct cgraph_node *node;
502
503 if (free_nodes)
504 {
505 node = free_nodes;
506 free_nodes = NEXT_FREE_NODE (node);
507 }
508 else
509 {
510 node = ggc_cleared_alloc<cgraph_node> ();
511 node->uid = cgraph_max_uid++;
512 }
513
514 return node;
515 }
516
517 /* Allocate new callgraph node and insert it into basic data structures. */
518
519 struct cgraph_node *
520 cgraph_create_empty_node (void)
521 {
522 struct cgraph_node *node = cgraph_allocate_node ();
523
524 node->type = SYMTAB_FUNCTION;
525 node->frequency = NODE_FREQUENCY_NORMAL;
526 node->count_materialization_scale = REG_BR_PROB_BASE;
527 cgraph_n_nodes++;
528 return node;
529 }
530
531 /* Return cgraph node assigned to DECL. Create new one when needed. */
532
533 struct cgraph_node *
534 cgraph_create_node (tree decl)
535 {
536 struct cgraph_node *node = cgraph_create_empty_node ();
537 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
538
539 node->decl = decl;
540 symtab_register_node (node);
541
542 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
543 {
544 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
545 node->next_nested = node->origin->nested;
546 node->origin->nested = node;
547 }
548 return node;
549 }
550
551 /* Try to find a call graph node for declaration DECL and if it does not exist
552 or if it corresponds to an inline clone, create a new one. */
553
554 struct cgraph_node *
555 cgraph_get_create_node (tree decl)
556 {
557 struct cgraph_node *first_clone = cgraph_get_node (decl);
558
559 if (first_clone && !first_clone->global.inlined_to)
560 return first_clone;
561
562 struct cgraph_node *node = cgraph_create_node (decl);
563 if (first_clone)
564 {
565 first_clone->clone_of = node;
566 node->clones = first_clone;
567 symtab_prevail_in_asm_name_hash (node);
568 node->decl->decl_with_vis.symtab_node = node;
569 if (dump_file)
570 fprintf (dump_file, "Introduced new external node "
571 "(%s/%i) and turned into root of the clone tree.\n",
572 xstrdup (node->name ()), node->order);
573 }
574 else if (dump_file)
575 fprintf (dump_file, "Introduced new external node "
576 "(%s/%i).\n", xstrdup (node->name ()),
577 node->order);
578 return node;
579 }
580
 581 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node that the
 582 function body is associated with (not necessarily cgraph_node (DECL)). */
583
584 struct cgraph_node *
585 cgraph_create_function_alias (tree alias, tree target)
586 {
587 struct cgraph_node *alias_node;
588
589 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
590 || TREE_CODE (target) == IDENTIFIER_NODE);
591 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
592 alias_node = cgraph_get_create_node (alias);
593 gcc_assert (!alias_node->definition);
594 alias_node->alias_target = target;
595 alias_node->definition = true;
596 alias_node->alias = true;
597 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
598 alias_node->weakref = true;
599 return alias_node;
600 }
601
602 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
603 and NULL otherwise.
604 Same body aliases are output whenever the body of DECL is output,
605 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
606
607 struct cgraph_node *
608 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
609 {
610 struct cgraph_node *n;
611 #ifndef ASM_OUTPUT_DEF
612 /* If aliases aren't supported by the assembler, fail. */
613 return NULL;
614 #endif
615 /* Langhooks can create same body aliases of symbols not defined.
616 Those are useless. Drop them on the floor. */
617 if (cgraph_global_info_ready)
618 return NULL;
619
620 n = cgraph_create_function_alias (alias, decl);
621 n->cpp_implicit_alias = true;
622 if (cpp_implicit_aliases_done)
623 symtab_resolve_alias (n,
624 cgraph_get_node (decl));
625 return n;
626 }
627
 628 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
 629 aliases DECL with an adjustment made to the first parameter.
 630 See comments in thunk_adjust for details on the parameters. */
631
632 struct cgraph_node *
633 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
634 tree alias, tree decl ATTRIBUTE_UNUSED,
635 bool this_adjusting,
636 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
637 tree virtual_offset,
638 tree real_alias)
639 {
640 struct cgraph_node *node;
641
642 node = cgraph_get_node (alias);
643 if (node)
644 cgraph_reset_node (node);
645 else
646 node = cgraph_create_node (alias);
647 gcc_checking_assert (!virtual_offset
648 || wi::eq_p (virtual_offset, virtual_value));
649 node->thunk.fixed_offset = fixed_offset;
650 node->thunk.this_adjusting = this_adjusting;
651 node->thunk.virtual_value = virtual_value;
652 node->thunk.virtual_offset_p = virtual_offset != NULL;
653 node->thunk.alias = real_alias;
654 node->thunk.thunk_p = true;
655 node->definition = true;
656
657 return node;
658 }
659
660 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
661 Return NULL if there's no such node. */
662
663 struct cgraph_node *
664 cgraph_node_for_asm (tree asmname)
665 {
666 /* We do not want to look at inline clones. */
667 for (symtab_node *node = symtab_node_for_asm (asmname);
668 node;
669 node = node->next_sharing_asm_name)
670 {
671 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
672 if (cn && !cn->global.inlined_to)
673 return cn;
674 }
675 return NULL;
676 }
677
678 /* Returns a hash value for X (which really is a cgraph_edge). */
679
680 static hashval_t
681 edge_hash (const void *x)
682 {
683 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
684 }
685
 686 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
687
688 static int
689 edge_eq (const void *x, const void *y)
690 {
691 return ((const struct cgraph_edge *) x)->call_stmt == y;
692 }
693
 694 /* Update call graph edge E in the call site hash of its caller. */
695
696 static inline void
697 cgraph_update_edge_in_call_site_hash (struct cgraph_edge *e)
698 {
699 void **slot;
700 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
701 e->call_stmt,
702 htab_hash_pointer (e->call_stmt),
703 INSERT);
704 *slot = e;
705 }
706
 707 /* Add call graph edge E to the call site hash of its caller. */
708
709 static inline void
710 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
711 {
712 void **slot;
713 /* There are two speculative edges for every statement (one direct,
714 one indirect); always hash the direct one. */
715 if (e->speculative && e->indirect_unknown_callee)
716 return;
717 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
718 e->call_stmt,
719 htab_hash_pointer (e->call_stmt),
720 INSERT);
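  /* If the slot is already occupied, it must hold the other half of a
     speculative call pair; prefer keeping the direct edge (the one with a
     known callee) in the hash.  */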
721 if (*slot)
722 {
723 gcc_assert (((struct cgraph_edge *)*slot)->speculative);
724 if (e->callee)
725 *slot = e;
726 return;
727 }
728 gcc_assert (!*slot || e->speculative);
729 *slot = e;
730 }
731
732 /* Return the callgraph edge representing the GIMPLE_CALL statement
733 CALL_STMT. */
734
735 struct cgraph_edge *
736 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
737 {
738 struct cgraph_edge *e, *e2;
739 int n = 0;
740
741 if (node->call_site_hash)
742 return (struct cgraph_edge *)
743 htab_find_with_hash (node->call_site_hash, call_stmt,
744 htab_hash_pointer (call_stmt));
745
 746 /* This loop may turn out to be a performance problem. In that case, adding
 747 hash tables to call nodes with very many edges is probably the best
 748 solution. It is not a good idea to add a pointer to the CALL_EXPR itself,
 749 because we want to allow multiple cgraph nodes representing
 750 different clones of the same body before the body is actually cloned. */
751 for (e = node->callees; e; e = e->next_callee)
752 {
753 if (e->call_stmt == call_stmt)
754 break;
755 n++;
756 }
757
758 if (!e)
759 for (e = node->indirect_calls; e; e = e->next_callee)
760 {
761 if (e->call_stmt == call_stmt)
762 break;
763 n++;
764 }
765
766 if (n > 100)
767 {
768 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
769 for (e2 = node->callees; e2; e2 = e2->next_callee)
770 cgraph_add_edge_to_call_site_hash (e2);
771 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
772 cgraph_add_edge_to_call_site_hash (e2);
773 }
774
775 return e;
776 }
777
778
779 /* Change field call_stmt of edge E to NEW_STMT.
780 If UPDATE_SPECULATIVE and E is any component of speculative
781 edge, then update all components. */
782
783 void
784 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt,
785 bool update_speculative)
786 {
787 tree decl;
788
 789 /* Speculative edges have three components; update all of them
 790 when asked to. */
791 if (update_speculative && e->speculative)
792 {
793 struct cgraph_edge *direct, *indirect;
794 struct ipa_ref *ref;
795
796 cgraph_speculative_call_info (e, direct, indirect, ref);
797 cgraph_set_call_stmt (direct, new_stmt, false);
798 cgraph_set_call_stmt (indirect, new_stmt, false);
799 ref->stmt = new_stmt;
800 return;
801 }
802
803 /* Only direct speculative edges go to call_site_hash. */
804 if (e->caller->call_site_hash
805 && (!e->speculative || !e->indirect_unknown_callee))
806 {
807 htab_remove_elt_with_hash (e->caller->call_site_hash,
808 e->call_stmt,
809 htab_hash_pointer (e->call_stmt));
810 }
811
812 e->call_stmt = new_stmt;
813 if (e->indirect_unknown_callee
814 && (decl = gimple_call_fndecl (new_stmt)))
815 {
816 /* Constant propagation (and possibly also inlining?) can turn an
817 indirect call into a direct one. */
818 struct cgraph_node *new_callee = cgraph_get_node (decl);
819
820 gcc_checking_assert (new_callee);
821 e = cgraph_make_edge_direct (e, new_callee);
822 }
823
824 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
825 e->can_throw_external = stmt_can_throw_external (new_stmt);
826 pop_cfun ();
827 if (e->caller->call_site_hash)
828 cgraph_add_edge_to_call_site_hash (e);
829 }
830
831 /* Allocate a cgraph_edge structure and fill it with data according to the
832 parameters of which only CALLEE can be NULL (when creating an indirect call
833 edge). */
834
835 static struct cgraph_edge *
836 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
837 gimple call_stmt, gcov_type count, int freq,
838 bool indir_unknown_callee)
839 {
840 struct cgraph_edge *edge;
841
842 /* LTO does not actually have access to the call_stmt since these
843 have not been loaded yet. */
844 if (call_stmt)
845 {
846 /* This is a rather expensive check possibly triggering
847 construction of call stmt hashtable. */
848 #ifdef ENABLE_CHECKING
849 struct cgraph_edge *e;
850 gcc_checking_assert (!(e=cgraph_edge (caller, call_stmt)) || e->speculative);
851 #endif
852
853 gcc_assert (is_gimple_call (call_stmt));
854 }
855
856 if (free_edges)
857 {
858 edge = free_edges;
859 free_edges = NEXT_FREE_EDGE (edge);
860 }
861 else
862 {
863 edge = ggc_alloc<struct cgraph_edge> ();
864 edge->uid = cgraph_edge_max_uid++;
865 }
866
867 edge->aux = NULL;
868 edge->caller = caller;
869 edge->callee = callee;
870 edge->prev_caller = NULL;
871 edge->next_caller = NULL;
872 edge->prev_callee = NULL;
873 edge->next_callee = NULL;
874 edge->lto_stmt_uid = 0;
875
876 edge->count = count;
877 gcc_assert (count >= 0);
878 edge->frequency = freq;
879 gcc_assert (freq >= 0);
880 gcc_assert (freq <= CGRAPH_FREQ_MAX);
881
882 edge->call_stmt = call_stmt;
883 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
884 edge->can_throw_external
885 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
886 pop_cfun ();
887 if (call_stmt
888 && callee && callee->decl
889 && !gimple_check_call_matching_types (call_stmt, callee->decl,
890 false))
891 edge->call_stmt_cannot_inline_p = true;
892 else
893 edge->call_stmt_cannot_inline_p = false;
894
895 edge->indirect_info = NULL;
896 edge->indirect_inlining_edge = 0;
897 edge->speculative = false;
898 edge->indirect_unknown_callee = indir_unknown_callee;
899 if (call_stmt && caller->call_site_hash)
900 cgraph_add_edge_to_call_site_hash (edge);
901
902 return edge;
903 }
904
905 /* Create edge from CALLER to CALLEE in the cgraph. */
906
907 struct cgraph_edge *
908 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
909 gimple call_stmt, gcov_type count, int freq)
910 {
911 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
912 count, freq, false);
913
914 initialize_inline_failed (edge);
915
916 edge->next_caller = callee->callers;
917 if (callee->callers)
918 callee->callers->prev_caller = edge;
919 edge->next_callee = caller->callees;
920 if (caller->callees)
921 caller->callees->prev_callee = edge;
922 caller->callees = edge;
923 callee->callers = edge;
924
925 return edge;
926 }
927
928 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
929
930 struct cgraph_indirect_call_info *
931 cgraph_allocate_init_indirect_info (void)
932 {
933 struct cgraph_indirect_call_info *ii;
934
935 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
936 ii->param_index = -1;
937 return ii;
938 }
939
940 /* Create an indirect edge with a yet-undetermined callee where the call
941 statement destination is a formal parameter of the caller with index
942 PARAM_INDEX. */
943
944 struct cgraph_edge *
945 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
946 int ecf_flags,
947 gcov_type count, int freq)
948 {
949 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
950 count, freq, true);
951 tree target;
952
953 initialize_inline_failed (edge);
954
955 edge->indirect_info = cgraph_allocate_init_indirect_info ();
956 edge->indirect_info->ecf_flags = ecf_flags;
957
958 /* Record polymorphic call info. */
959 if (call_stmt
960 && (target = gimple_call_fn (call_stmt))
961 && virtual_method_call_p (target))
962 {
963 tree otr_type;
964 HOST_WIDE_INT otr_token;
965 ipa_polymorphic_call_context context;
966
967 get_polymorphic_call_info (caller->decl,
968 target,
969 &otr_type, &otr_token,
970 &context);
971
972 /* Only record types can have virtual calls. */
973 gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
974 edge->indirect_info->polymorphic = true;
975 edge->indirect_info->param_index = -1;
976 edge->indirect_info->otr_token = otr_token;
977 edge->indirect_info->otr_type = otr_type;
978 edge->indirect_info->outer_type = context.outer_type;
979 edge->indirect_info->offset = context.offset;
980 edge->indirect_info->maybe_in_construction
981 = context.maybe_in_construction;
982 edge->indirect_info->maybe_derived_type = context.maybe_derived_type;
983 }
984
985 edge->next_callee = caller->indirect_calls;
986 if (caller->indirect_calls)
987 caller->indirect_calls->prev_callee = edge;
988 caller->indirect_calls = edge;
989
990 return edge;
991 }
992
993 /* Remove the edge E from the list of the callers of the callee. */
994
995 static inline void
996 cgraph_edge_remove_callee (struct cgraph_edge *e)
997 {
998 gcc_assert (!e->indirect_unknown_callee);
999 if (e->prev_caller)
1000 e->prev_caller->next_caller = e->next_caller;
1001 if (e->next_caller)
1002 e->next_caller->prev_caller = e->prev_caller;
1003 if (!e->prev_caller)
1004 e->callee->callers = e->next_caller;
1005 }
1006
1007 /* Remove the edge E from the list of the callees of the caller. */
1008
1009 static inline void
1010 cgraph_edge_remove_caller (struct cgraph_edge *e)
1011 {
1012 if (e->prev_callee)
1013 e->prev_callee->next_callee = e->next_callee;
1014 if (e->next_callee)
1015 e->next_callee->prev_callee = e->prev_callee;
1016 if (!e->prev_callee)
1017 {
1018 if (e->indirect_unknown_callee)
1019 e->caller->indirect_calls = e->next_callee;
1020 else
1021 e->caller->callees = e->next_callee;
1022 }
1023 if (e->caller->call_site_hash)
1024 htab_remove_elt_with_hash (e->caller->call_site_hash,
1025 e->call_stmt,
1026 htab_hash_pointer (e->call_stmt));
1027 }
1028
1029 /* Put the edge onto the free list. */
1030
1031 static void
1032 cgraph_free_edge (struct cgraph_edge *e)
1033 {
1034 int uid = e->uid;
1035
1036 if (e->indirect_info)
1037 ggc_free (e->indirect_info);
1038
1039 /* Clear out the edge so we do not dangle pointers. */
1040 memset (e, 0, sizeof (*e));
1041 e->uid = uid;
1042 NEXT_FREE_EDGE (e) = free_edges;
1043 free_edges = e;
1044 }
1045
1046 /* Remove the edge E in the cgraph. */
1047
1048 void
1049 cgraph_remove_edge (struct cgraph_edge *e)
1050 {
1051 /* Call all edge removal hooks. */
1052 cgraph_call_edge_removal_hooks (e);
1053
1054 if (!e->indirect_unknown_callee)
1055 /* Remove from callers list of the callee. */
1056 cgraph_edge_remove_callee (e);
1057
1058 /* Remove from callees list of the callers. */
1059 cgraph_edge_remove_caller (e);
1060
1061 /* Put the edge onto the free list. */
1062 cgraph_free_edge (e);
1063 }
1064
1065 /* Set callee of call graph edge E and add it to the corresponding set of
1066 callers. */
1067
1068 static void
1069 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1070 {
1071 e->prev_caller = NULL;
1072 if (n->callers)
1073 n->callers->prev_caller = e;
1074 e->next_caller = n->callers;
1075 n->callers = e;
1076 e->callee = n;
1077 }
1078
 1079 /* Turn edge E into a speculative call calling N2. Update
 1080 the profile so the direct call is taken DIRECT_COUNT times
 1081 with DIRECT_FREQUENCY.
1082
1083 At clone materialization time, the indirect call E will
1084 be expanded as:
1085
1086 if (call_dest == N2)
1087 n2 ();
1088 else
1089 call call_dest
1090
1091 At this time the function just creates the direct call,
 1092 the reference representing the if conditional, and attaches
 1093 them all to the original indirect call statement.
 1094
 1095 Return the direct edge created. */
1096
1097 struct cgraph_edge *
1098 cgraph_turn_edge_to_speculative (struct cgraph_edge *e,
1099 struct cgraph_node *n2,
1100 gcov_type direct_count,
1101 int direct_frequency)
1102 {
1103 struct cgraph_node *n = e->caller;
1104 struct ipa_ref *ref = NULL;
1105 struct cgraph_edge *e2;
1106
1107 if (dump_file)
1108 {
1109 fprintf (dump_file, "Indirect call -> speculative call"
1110 " %s/%i => %s/%i\n",
1111 xstrdup (n->name ()), n->order,
1112 xstrdup (n2->name ()), n2->order);
1113 }
1114 e->speculative = true;
1115 e2 = cgraph_create_edge (n, n2, e->call_stmt, direct_count, direct_frequency);
1116 initialize_inline_failed (e2);
1117 e2->speculative = true;
1118 if (TREE_NOTHROW (n2->decl))
1119 e2->can_throw_external = false;
1120 else
1121 e2->can_throw_external = e->can_throw_external;
1122 e2->lto_stmt_uid = e->lto_stmt_uid;
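   /* The direct edge takes DIRECT_COUNT/DIRECT_FREQUENCY; whatever remains
      stays attributed to the original indirect edge.  */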
1123 e->count -= e2->count;
1124 e->frequency -= e2->frequency;
1125 cgraph_call_edge_duplication_hooks (e, e2);
1126 ref = n->add_reference (n2, IPA_REF_ADDR, e->call_stmt);
1127 ref->lto_stmt_uid = e->lto_stmt_uid;
1128 ref->speculative = e->speculative;
1129 cgraph_mark_address_taken_node (n2);
1130 return e2;
1131 }
1132
 1133 /* A speculative call consists of three components:
 1134 1) an indirect edge representing the original call
 1135 2) a direct edge representing the new call
 1136 3) an ADDR_EXPR reference representing the speculative check.
 1137 All three components are attached to a single statement (the indirect
 1138 call) and if one of them exists, all of them must exist.
 1139
 1140 Given a speculative call edge E, return all three components.
 1141 */
1142
1143 void
1144 cgraph_speculative_call_info (struct cgraph_edge *e,
1145 struct cgraph_edge *&direct,
1146 struct cgraph_edge *&indirect,
1147 struct ipa_ref *&reference)
1148 {
1149 struct ipa_ref *ref;
1150 int i;
1151 struct cgraph_edge *e2;
1152
1153 if (!e->indirect_unknown_callee)
1154 for (e2 = e->caller->indirect_calls;
1155 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1156 e2 = e2->next_callee)
1157 ;
1158 else
1159 {
1160 e2 = e;
1161 /* We can take advantage of the call stmt hash. */
1162 if (e2->call_stmt)
1163 {
1164 e = cgraph_edge (e->caller, e2->call_stmt);
1165 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1166 }
1167 else
1168 for (e = e->caller->callees;
1169 e2->call_stmt != e->call_stmt
1170 || e2->lto_stmt_uid != e->lto_stmt_uid;
1171 e = e->next_callee)
1172 ;
1173 }
1174 gcc_assert (e->speculative && e2->speculative);
1175 direct = e;
1176 indirect = e2;
1177
1178 reference = NULL;
1179 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1180 if (ref->speculative
1181 && ((ref->stmt && ref->stmt == e->call_stmt)
1182 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1183 {
1184 reference = ref;
1185 break;
1186 }
1187
 1188 /* A speculative edge always consists of all three components: the direct
 1189 edge, the indirect edge and the reference. */
1190
1191 gcc_assert (e && e2 && ref);
1192 }
1193
1194 /* Redirect callee of E to N. The function does not update underlying
1195 call expression. */
1196
1197 void
1198 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1199 {
1200 /* Remove from callers list of the current callee. */
1201 cgraph_edge_remove_callee (e);
1202
1203 /* Insert to callers list of the new callee. */
1204 cgraph_set_edge_callee (e, n);
1205 }
1206
 1207 /* Speculative call EDGE turned out to be a direct call to CALLEE_DECL.
 1208 Remove the speculative call sequence and return the edge representing the call.
 1209 It is up to the caller to redirect the call as appropriate. */
1210
1211 struct cgraph_edge *
1212 cgraph_resolve_speculation (struct cgraph_edge *edge, tree callee_decl)
1213 {
1214 struct cgraph_edge *e2;
1215 struct ipa_ref *ref;
1216
1217 gcc_assert (edge->speculative);
1218 cgraph_speculative_call_info (edge, e2, edge, ref);
1219 if (!callee_decl
1220 || !symtab_semantically_equivalent_p (ref->referred,
1221 symtab_get_node (callee_decl)))
1222 {
1223 if (dump_file)
1224 {
1225 if (callee_decl)
1226 {
1227 fprintf (dump_file, "Speculative indirect call %s/%i => %s/%i has "
1228 "turned out to have contradicting known target ",
1229 xstrdup (edge->caller->name ()), edge->caller->order,
1230 xstrdup (e2->callee->name ()), e2->callee->order);
1231 print_generic_expr (dump_file, callee_decl, 0);
1232 fprintf (dump_file, "\n");
1233 }
1234 else
1235 {
1236 fprintf (dump_file, "Removing speculative call %s/%i => %s/%i\n",
1237 xstrdup (edge->caller->name ()), edge->caller->order,
1238 xstrdup (e2->callee->name ()), e2->callee->order);
1239 }
1240 }
1241 }
1242 else
1243 {
1244 struct cgraph_edge *tmp = edge;
1245 if (dump_file)
1246 fprintf (dump_file, "Speculative call turned into direct call.\n");
1247 edge = e2;
1248 e2 = tmp;
1249 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1250 in the functions inlined through it. */
1251 }
1252 edge->count += e2->count;
1253 edge->frequency += e2->frequency;
1254 if (edge->frequency > CGRAPH_FREQ_MAX)
1255 edge->frequency = CGRAPH_FREQ_MAX;
1256 edge->speculative = false;
1257 e2->speculative = false;
1258 ref->remove_reference ();
1259 if (e2->indirect_unknown_callee || e2->inline_failed)
1260 cgraph_remove_edge (e2);
1261 else
1262 cgraph_remove_node_and_inline_clones (e2->callee, NULL);
1263 if (edge->caller->call_site_hash)
1264 cgraph_update_edge_in_call_site_hash (edge);
1265 return edge;
1266 }
1267
 1268 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
 1269 CALLEE. */
1271
1272 struct cgraph_edge *
1273 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1274 {
1275 gcc_assert (edge->indirect_unknown_callee);
1276
1277 /* If we are redirecting speculative call, make it non-speculative. */
1278 if (edge->indirect_unknown_callee && edge->speculative)
1279 {
1280 edge = cgraph_resolve_speculation (edge, callee->decl);
1281
 1282 /* On successful speculation just return the pre-existing direct edge. */
1283 if (!edge->indirect_unknown_callee)
1284 return edge;
1285 }
1286
1287 edge->indirect_unknown_callee = 0;
1288 ggc_free (edge->indirect_info);
1289 edge->indirect_info = NULL;
1290
1291 /* Get the edge out of the indirect edge list. */
1292 if (edge->prev_callee)
1293 edge->prev_callee->next_callee = edge->next_callee;
1294 if (edge->next_callee)
1295 edge->next_callee->prev_callee = edge->prev_callee;
1296 if (!edge->prev_callee)
1297 edge->caller->indirect_calls = edge->next_callee;
1298
 1299 /* Put it into the normal callee list. */
1300 edge->prev_callee = NULL;
1301 edge->next_callee = edge->caller->callees;
1302 if (edge->caller->callees)
1303 edge->caller->callees->prev_callee = edge;
1304 edge->caller->callees = edge;
1305
1306 /* Insert to callers list of the new callee. */
1307 cgraph_set_edge_callee (edge, callee);
1308
1309 if (edge->call_stmt)
1310 edge->call_stmt_cannot_inline_p
1311 = !gimple_check_call_matching_types (edge->call_stmt, callee->decl,
1312 false);
1313
1314 /* We need to re-determine the inlining status of the edge. */
1315 initialize_inline_failed (edge);
1316 return edge;
1317 }
1318
1319 /* If necessary, change the function declaration in the call statement
1320 associated with E so that it corresponds to the edge callee. */
1321
1322 gimple
1323 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
1324 {
1325 tree decl = gimple_call_fndecl (e->call_stmt);
1326 tree lhs = gimple_call_lhs (e->call_stmt);
1327 gimple new_stmt;
1328 gimple_stmt_iterator gsi;
1329 #ifdef ENABLE_CHECKING
1330 struct cgraph_node *node;
1331 #endif
1332
1333 if (e->speculative)
1334 {
1335 struct cgraph_edge *e2;
1336 gimple new_stmt;
1337 struct ipa_ref *ref;
1338
1339 cgraph_speculative_call_info (e, e, e2, ref);
 1340 /* If there already is a direct call (i.e. as a result of the inliner's
 1341 substitution), forget about speculating. */
1342 if (decl)
1343 e = cgraph_resolve_speculation (e, decl);
1344 /* If types do not match, speculation was likely wrong.
 1345 The direct edge was possibly redirected to the clone with a different
1346 signature. We did not update the call statement yet, so compare it
1347 with the reference that still points to the proper type. */
1348 else if (!gimple_check_call_matching_types (e->call_stmt,
1349 ref->referred->decl,
1350 true))
1351 {
1352 if (dump_file)
1353 fprintf (dump_file, "Not expanding speculative call of %s/%i -> %s/%i\n"
1354 "Type mismatch.\n",
1355 xstrdup (e->caller->name ()),
1356 e->caller->order,
1357 xstrdup (e->callee->name ()),
1358 e->callee->order);
1359 e = cgraph_resolve_speculation (e, NULL);
1360 /* We are producing the final function body and will throw away the
1361 callgraph edges really soon. Reset the counts/frequencies to
1362 keep verifier happy in the case of roundoff errors. */
1363 e->count = gimple_bb (e->call_stmt)->count;
1364 e->frequency = compute_call_stmt_bb_frequency
1365 (e->caller->decl, gimple_bb (e->call_stmt));
1366 }
1367 /* Expand speculation into GIMPLE code. */
1368 else
1369 {
1370 if (dump_file)
1371 fprintf (dump_file,
1372 "Expanding speculative call of %s/%i -> %s/%i count:"
1373 "%"PRId64"\n",
1374 xstrdup (e->caller->name ()),
1375 e->caller->order,
1376 xstrdup (e->callee->name ()),
1377 e->callee->order,
1378 (int64_t)e->count);
1379 gcc_assert (e2->speculative);
1380 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
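          /* The probability handed to gimple_ic is the direct edge's share of
             the combined count (or frequency), scaled to REG_BR_PROB_BASE;
             with no profile at all, assume an even split.  */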
1381 new_stmt = gimple_ic (e->call_stmt, cgraph (ref->referred),
1382 e->count || e2->count
1383 ? RDIV (e->count * REG_BR_PROB_BASE,
1384 e->count + e2->count)
1385 : e->frequency || e2->frequency
1386 ? RDIV (e->frequency * REG_BR_PROB_BASE,
1387 e->frequency + e2->frequency)
1388 : REG_BR_PROB_BASE / 2,
1389 e->count, e->count + e2->count);
1390 e->speculative = false;
1391 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt,
1392 new_stmt, false);
1393 e->frequency = compute_call_stmt_bb_frequency
1394 (e->caller->decl, gimple_bb (e->call_stmt));
1395 e2->frequency = compute_call_stmt_bb_frequency
1396 (e2->caller->decl, gimple_bb (e2->call_stmt));
1397 e2->speculative = false;
1398 ref->speculative = false;
1399 ref->stmt = NULL;
 1400 /* Only one of the two speculative edges is kept in the call site hash;
 1401 get it updated. */
1402 if (e->caller->call_site_hash)
1403 cgraph_update_edge_in_call_site_hash (e2);
1404 pop_cfun ();
1405 /* Continue redirecting E to proper target. */
1406 }
1407 }
1408
1409 if (e->indirect_unknown_callee
1410 || decl == e->callee->decl)
1411 return e->call_stmt;
1412
1413 #ifdef ENABLE_CHECKING
1414 if (decl)
1415 {
1416 node = cgraph_get_node (decl);
1417 gcc_assert (!node || !node->clone.combined_args_to_skip);
1418 }
1419 #endif
1420
1421 if (cgraph_dump_file)
1422 {
1423 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
1424 xstrdup (e->caller->name ()), e->caller->order,
1425 xstrdup (e->callee->name ()), e->callee->order);
1426 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1427 if (e->callee->clone.combined_args_to_skip)
1428 {
1429 fprintf (cgraph_dump_file, " combined args to skip: ");
1430 dump_bitmap (cgraph_dump_file,
1431 e->callee->clone.combined_args_to_skip);
1432 }
1433 }
1434
1435 if (e->callee->clone.combined_args_to_skip)
1436 {
1437 int lp_nr;
1438
1439 new_stmt
1440 = gimple_call_copy_skip_args (e->call_stmt,
1441 e->callee->clone.combined_args_to_skip);
1442 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1443 gimple_call_set_fntype (new_stmt, gimple_call_fntype (e->call_stmt));
1444
1445 if (gimple_vdef (new_stmt)
1446 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1447 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1448
1449 gsi = gsi_for_stmt (e->call_stmt);
1450 gsi_replace (&gsi, new_stmt, false);
1451 /* We need to defer cleaning EH info on the new statement to
1452 fixup-cfg. We may not have dominator information at this point
1453 and thus would end up with unreachable blocks and have no way
1454 to communicate that we need to run CFG cleanup then. */
1455 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1456 if (lp_nr != 0)
1457 {
1458 remove_stmt_from_eh_lp (e->call_stmt);
1459 add_stmt_to_eh_lp (new_stmt, lp_nr);
1460 }
1461 }
1462 else
1463 {
1464 new_stmt = e->call_stmt;
1465 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1466 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1467 }
1468
1469 /* If the call becomes noreturn, remove the lhs. */
1470 if (lhs && (gimple_call_flags (new_stmt) & ECF_NORETURN))
1471 {
1472 if (TREE_CODE (lhs) == SSA_NAME)
1473 {
1474 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1475 TREE_TYPE (lhs), NULL);
1476 var = get_or_create_ssa_default_def
1477 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1478 gimple set_stmt = gimple_build_assign (lhs, var);
1479 gsi = gsi_for_stmt (new_stmt);
1480 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1481 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1482 }
1483 gimple_call_set_lhs (new_stmt, NULL_TREE);
1484 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1485 }
1486
1487 /* If new callee has no static chain, remove it. */
1488 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1489 {
1490 gimple_call_set_chain (new_stmt, NULL);
1491 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1492 }
1493
1494 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt, false);
1495
1496 if (cgraph_dump_file)
1497 {
1498 fprintf (cgraph_dump_file, " updated to:");
1499 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1500 }
1501 return new_stmt;
1502 }
1503
1504 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1505 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1506 of OLD_STMT if it was previously call statement.
1507 If NEW_STMT is NULL, the call has been dropped without any
1508 replacement. */
1509
1510 static void
1511 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1512 gimple old_stmt, tree old_call,
1513 gimple new_stmt)
1514 {
1515 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1516 ? gimple_call_fndecl (new_stmt) : 0;
1517
 1518 /* If we are only seeing indirect calls, there is nothing to update. */
1519 if (!new_call && !old_call)
1520 return;
 1521 /* See if we turned an indirect call into a direct call or folded a call to
 1522 one builtin into a different builtin. */
1523 if (old_call != new_call)
1524 {
1525 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1526 struct cgraph_edge *ne = NULL;
1527 gcov_type count;
1528 int frequency;
1529
1530 if (e)
1531 {
 1532 /* See if the edge is already there and has the correct callee. It
 1533 might be so because indirect inlining has already updated
 1534 it. We also might have cloned and redirected the edge. */
1535 if (new_call && e->callee)
1536 {
1537 struct cgraph_node *callee = e->callee;
1538 while (callee)
1539 {
1540 if (callee->decl == new_call
1541 || callee->former_clone_of == new_call)
1542 {
1543 cgraph_set_call_stmt (e, new_stmt);
1544 return;
1545 }
1546 callee = callee->clone_of;
1547 }
1548 }
1549
 1550 /* Otherwise remove the edge and create a new one; we can't simply redirect
 1551 it since the function has changed, so the inline plan and other information
 1552 attached to the edge are invalid. */
1553 count = e->count;
1554 frequency = e->frequency;
1555 if (e->indirect_unknown_callee || e->inline_failed)
1556 cgraph_remove_edge (e);
1557 else
1558 cgraph_remove_node_and_inline_clones (e->callee, NULL);
1559 }
1560 else if (new_call)
1561 {
 1562 /* We are seeing a new direct call; compute profile info based on its BB. */
1563 basic_block bb = gimple_bb (new_stmt);
1564 count = bb->count;
1565 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1566 bb);
1567 }
1568
1569 if (new_call)
1570 {
1571 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1572 new_stmt, count, frequency);
1573 gcc_assert (ne->inline_failed);
1574 }
1575 }
 1576 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1577 else if (old_stmt != new_stmt)
1578 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1579 }
1580
1581 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1582 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
 1583 of OLD_STMT before it was updated (updating can happen in place). */
1584
1585 void
1586 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1587 {
1588 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1589 struct cgraph_node *node;
1590
1591 gcc_checking_assert (orig);
1592 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
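  /* Walk the whole clone tree of ORIG (descending into clones first, then
     siblings, climbing back up via clone_of) so every clone sees the same
     update.  */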
1593 if (orig->clones)
1594 for (node = orig->clones; node != orig;)
1595 {
1596 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1597 if (node->clones)
1598 node = node->clones;
1599 else if (node->next_sibling_clone)
1600 node = node->next_sibling_clone;
1601 else
1602 {
1603 while (node != orig && !node->next_sibling_clone)
1604 node = node->clone_of;
1605 if (node != orig)
1606 node = node->next_sibling_clone;
1607 }
1608 }
1609 }
1610
1611
1612 /* Remove all callees from the node. */
1613
1614 void
1615 cgraph_node_remove_callees (struct cgraph_node *node)
1616 {
1617 struct cgraph_edge *e, *f;
1618
1619 /* It is sufficient to remove the edges from the lists of callers of
1620 the callees. The callee list of the node can be zapped with one
1621 assignment. */
1622 for (e = node->callees; e; e = f)
1623 {
1624 f = e->next_callee;
1625 cgraph_call_edge_removal_hooks (e);
1626 if (!e->indirect_unknown_callee)
1627 cgraph_edge_remove_callee (e);
1628 cgraph_free_edge (e);
1629 }
1630 for (e = node->indirect_calls; e; e = f)
1631 {
1632 f = e->next_callee;
1633 cgraph_call_edge_removal_hooks (e);
1634 if (!e->indirect_unknown_callee)
1635 cgraph_edge_remove_callee (e);
1636 cgraph_free_edge (e);
1637 }
1638 node->indirect_calls = NULL;
1639 node->callees = NULL;
1640 if (node->call_site_hash)
1641 {
1642 htab_delete (node->call_site_hash);
1643 node->call_site_hash = NULL;
1644 }
1645 }
1646
1647 /* Remove all callers from the node. */
1648
1649 static void
1650 cgraph_node_remove_callers (struct cgraph_node *node)
1651 {
1652 struct cgraph_edge *e, *f;
1653
1654 /* It is sufficient to remove the edges from the lists of callees of
1655 the callers. The caller list of the node can be zapped with one
1656 assignment. */
1657 for (e = node->callers; e; e = f)
1658 {
1659 f = e->next_caller;
1660 cgraph_call_edge_removal_hooks (e);
1661 cgraph_edge_remove_caller (e);
1662 cgraph_free_edge (e);
1663 }
1664 node->callers = NULL;
1665 }
1666
1667 /* Helper function for cgraph_release_function_body and free_lang_data.
 1668 It releases the body of function DECL without having to inspect its
1669 possibly non-existent symtab node. */
1670
1671 void
1672 release_function_body (tree decl)
1673 {
1674 if (DECL_STRUCT_FUNCTION (decl))
1675 {
1676 push_cfun (DECL_STRUCT_FUNCTION (decl));
1677 if (cfun->cfg
1678 && current_loops)
1679 {
1680 cfun->curr_properties &= ~PROP_loops;
1681 loop_optimizer_finalize ();
1682 }
1683 if (cfun->gimple_df)
1684 {
1685 delete_tree_ssa ();
1686 delete_tree_cfg_annotations ();
1687 cfun->eh = NULL;
1688 }
1689 if (cfun->cfg)
1690 {
1691 gcc_assert (!dom_info_available_p (CDI_DOMINATORS));
1692 gcc_assert (!dom_info_available_p (CDI_POST_DOMINATORS));
1693 clear_edges ();
1694 cfun->cfg = NULL;
1695 }
1696 if (cfun->value_histograms)
1697 free_histograms ();
1698 pop_cfun ();
1699 gimple_set_body (decl, NULL);
 1700 /* The struct function hangs onto a lot of data that would leak if we didn't
 1701 remove all pointers to it. */
1702 ggc_free (DECL_STRUCT_FUNCTION (decl));
1703 DECL_STRUCT_FUNCTION (decl) = NULL;
1704 }
1705 DECL_SAVED_TREE (decl) = NULL;
1706 }
1707
1708 /* Release memory used to represent body of function NODE.
1709 Use this only for functions that are released before being translated to
1710 target code (i.e. RTL). Functions that are compiled to RTL and beyond
 1711 are freed in final.c via free_after_compilation(). */
1712
1713 void
1714 cgraph_release_function_body (struct cgraph_node *node)
1715 {
1716 node->ipa_transforms_to_apply.release ();
1717 if (!node->used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
1718 {
1719 DECL_RESULT (node->decl) = NULL;
1720 DECL_ARGUMENTS (node->decl) = NULL;
1721 }
1722 /* If the node is abstract and needed, then do not clear DECL_INITIAL
 1723 of its associated function declaration because it's
1724 needed to emit debug info later. */
1725 if (!node->used_as_abstract_origin && DECL_INITIAL (node->decl))
1726 DECL_INITIAL (node->decl) = error_mark_node;
1727 release_function_body (node->decl);
1728 if (node->lto_file_data)
1729 lto_free_function_in_decl_state_for_node (node);
1730 }
1731
1732 /* Remove the node from cgraph. */
1733
1734 void
1735 cgraph_remove_node (struct cgraph_node *node)
1736 {
1737 struct cgraph_node *n;
1738 int uid = node->uid;
1739
1740 cgraph_call_node_removal_hooks (node);
1741 cgraph_node_remove_callers (node);
1742 cgraph_node_remove_callees (node);
1743 node->ipa_transforms_to_apply.release ();
1744
 1745 /* Incremental inlining accesses removed nodes stored in the postorder
 1746 list. */
1747 node->force_output = false;
1748 node->forced_by_abi = false;
1749 for (n = node->nested; n; n = n->next_nested)
1750 n->origin = NULL;
1751 node->nested = NULL;
1752 if (node->origin)
1753 {
1754 struct cgraph_node **node2 = &node->origin->nested;
1755
1756 while (*node2 != node)
1757 node2 = &(*node2)->next_nested;
1758 *node2 = node->next_nested;
1759 }
1760 symtab_unregister_node (node);
1761 if (node->prev_sibling_clone)
1762 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1763 else if (node->clone_of)
1764 node->clone_of->clones = node->next_sibling_clone;
1765 if (node->next_sibling_clone)
1766 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1767 if (node->clones)
1768 {
1769 struct cgraph_node *n, *next;
1770
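      /* The removed node is itself a clone: splice its clones into the
	 clone list of its clone_of so they keep a consistent parent.  */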
1771 if (node->clone_of)
1772 {
1773 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1774 n->clone_of = node->clone_of;
1775 n->clone_of = node->clone_of;
1776 n->next_sibling_clone = node->clone_of->clones;
1777 if (node->clone_of->clones)
1778 node->clone_of->clones->prev_sibling_clone = n;
1779 node->clone_of->clones = node->clones;
1780 }
1781 else
1782 {
 1783 /* We are removing a node with clones. This makes the clones inconsistent,
 1784 but assume they will be removed subsequently and just keep the clone
 1785 tree intact. This can happen during unreachable function removal, since
 1786 we remove unreachable functions in random order, not by a bottom-up
 1787 walk of clone trees. */
1788 for (n = node->clones; n; n = next)
1789 {
1790 next = n->next_sibling_clone;
1791 n->next_sibling_clone = NULL;
1792 n->prev_sibling_clone = NULL;
1793 n->clone_of = NULL;
1794 }
1795 }
1796 }
1797
 1798 /* While all the clones are removed after being processed, the function
 1799 itself is kept in the cgraph even after it is compiled. Check whether
 1800 we are done with this body and reclaim it proactively if this is the
 1801 case. */
1802 if (cgraph_state != CGRAPH_LTO_STREAMING)
1803 {
1804 n = cgraph_get_node (node->decl);
1805 if (!n
1806 || (!n->clones && !n->clone_of && !n->global.inlined_to
1807 && (cgraph_global_info_ready
1808 && (TREE_ASM_WRITTEN (n->decl)
1809 || DECL_EXTERNAL (n->decl)
1810 || !n->analyzed
1811 || (!flag_wpa && n->in_other_partition)))))
1812 cgraph_release_function_body (node);
1813 }
1814
1815 node->decl = NULL;
1816 if (node->call_site_hash)
1817 {
1818 htab_delete (node->call_site_hash);
1819 node->call_site_hash = NULL;
1820 }
1821 cgraph_n_nodes--;
1822
1823 /* Clear out the node to NULL all pointers and add the node to the free
1824 list. */
1825 memset (node, 0, sizeof (*node));
1826 node->type = SYMTAB_FUNCTION;
1827 node->uid = uid;
1828 SET_NEXT_FREE_NODE (node, free_nodes);
1829 free_nodes = node;
1830 }
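
/* Usage sketch (illustrative only, not a prescribed pattern): a pass that
   has proven a function dead might drop its node roughly like this, where
   DECL is assumed to be the FUNCTION_DECL in question:

     struct cgraph_node *n = cgraph_get_node (decl);
     if (n && !n->callers
         && cgraph_can_remove_if_no_direct_calls_and_refs_p (n))
       cgraph_remove_node (n);

   The removal hooks (see cgraph_call_node_removal_hooks above) run before
   the node is torn down, so IPA passes get a chance to drop their
   summaries for it.  */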
1831
1832 /* Indicate that the address of NODE is taken. */
1833
1834 void
1835 cgraph_mark_address_taken_node (struct cgraph_node *node)
1836 {
1837 /* Indirect inlining can figure out that all uses of the address are
1838 inlined. */
1839 if (node->global.inlined_to)
1840 {
1841 gcc_assert (cfun->after_inlining);
1842 gcc_assert (node->callers->indirect_inlining_edge);
1843 return;
1844 }
1845 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1846 IPA_REF_ADDR reference exists (and thus it should be set on node
1847 representing alias we take address of) and as a test whether address
1848 of the object was taken (and thus it should be set on node alias is
1849 referring to). We should remove the first use and then remove the
1850 following set. */
1851 node->address_taken = 1;
1852 node = cgraph_function_or_thunk_node (node, NULL);
1853 node->address_taken = 1;
1854 }
1855
1856 /* Return local info for the compiled function. */
1857
1858 struct cgraph_local_info *
1859 cgraph_local_info (tree decl)
1860 {
1861 struct cgraph_node *node;
1862
1863 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1864 node = cgraph_get_node (decl);
1865 if (!node)
1866 return NULL;
1867 return &node->local;
1868 }
1869
1870 /* Return global info for the compiled function. */
1871
1872 struct cgraph_global_info *
1873 cgraph_global_info (tree decl)
1874 {
1875 struct cgraph_node *node;
1876
1877 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1878 node = cgraph_get_node (decl);
1879 if (!node)
1880 return NULL;
1881 return &node->global;
1882 }
1883
1884 /* Return RTL info for the compiled function. */
1885
1886 struct cgraph_rtl_info *
1887 cgraph_rtl_info (tree decl)
1888 {
1889 struct cgraph_node *node;
1890
1891 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1892 node = cgraph_get_node (decl);
1893 if (!node
1894 || (decl != current_function_decl
1895 && !TREE_ASM_WRITTEN (node->decl)))
1896 return NULL;
1897 return &node->rtl;
1898 }
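
/* Minimal usage sketch for the three accessors above (illustrative;
   DECL is assumed to be a FUNCTION_DECL known to the call graph):

     struct cgraph_local_info *li = cgraph_local_info (decl);
     if (li && li->local)
       fprintf (stderr, "function is local to this compilation unit\n");

   cgraph_global_info additionally requires cgraph_global_info_ready,
   i.e. whole-unit analysis must have finished, and cgraph_rtl_info only
   returns non-NULL once the function has been (or is being) compiled
   to RTL.  */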
1899
1900 /* Return a string describing the failure REASON. */
1901
1902 const char*
1903 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1904 {
1905 #undef DEFCIFCODE
1906 #define DEFCIFCODE(code, type, string) string,
1907
1908 static const char *cif_string_table[CIF_N_REASONS] = {
1909 #include "cif-code.def"
1910 };
1911
1912 /* Signedness of an enum type is implementation defined, so cast it
1913 to unsigned before testing. */
1914 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1915 return cif_string_table[reason];
1916 }
1917
1918 /* Return a type describing the failure REASON. */
1919
1920 cgraph_inline_failed_type_t
1921 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1922 {
1923 #undef DEFCIFCODE
1924 #define DEFCIFCODE(code, type, string) type,
1925
1926 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1927 #include "cif-code.def"
1928 };
1929
1930 /* Signedness of an enum type is implementation defined, so cast it
1931 to unsigned before testing. */
1932 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1933 return cif_type_table[reason];
1934 }
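
/* Both lookups above are driven by cif-code.def via the usual .def
   stamping idiom.  For a hypothetical entry of the form

     DEFCIFCODE (RECURSIVE_INLINING, CIF_FINAL_NORMAL,
                 N_("recursive inlining"))

   the first expansion contributes the translatable string to
   cif_string_table and the second contributes CIF_FINAL_NORMAL to
   cif_type_table, both at the index of the corresponding CIF_ code.  */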
1935
1936 /* Names used to print out the availability enum. */
1937 const char * const cgraph_availability_names[] =
1938 {"unset", "not_available", "overwritable", "available", "local"};
1939
1940
1941 /* Dump call graph node NODE to file F. */
1942
1943 void
1944 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1945 {
1946 struct cgraph_edge *edge;
1947 int indirect_calls_count = 0;
1948
1949 dump_symtab_base (f, node);
1950
1951 if (node->global.inlined_to)
1952 fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
1953 xstrdup (node->name ()),
1954 node->order,
1955 xstrdup (node->global.inlined_to->name ()),
1956 node->global.inlined_to->order);
1957 if (node->clone_of)
1958 fprintf (f, " Clone of %s/%i\n",
1959 node->clone_of->asm_name (),
1960 node->clone_of->order);
1961 if (cgraph_function_flags_ready)
1962 fprintf (f, " Availability: %s\n",
1963 cgraph_availability_names [cgraph_function_body_availability (node)]);
1964
1965 if (node->profile_id)
1966 fprintf (f, " Profile id: %i\n",
1967 node->profile_id);
1968 fprintf (f, " First run: %i\n", node->tp_first_run);
1969 fprintf (f, " Function flags:");
1970 if (node->count)
1971 fprintf (f, " executed %" PRId64 "x",
1972 (int64_t)node->count);
1973 if (node->origin)
1974 fprintf (f, " nested in: %s", node->origin->asm_name ());
1975 if (gimple_has_body_p (node->decl))
1976 fprintf (f, " body");
1977 if (node->process)
1978 fprintf (f, " process");
1979 if (node->local.local)
1980 fprintf (f, " local");
1981 if (node->local.redefined_extern_inline)
1982 fprintf (f, " redefined_extern_inline");
1983 if (node->only_called_at_startup)
1984 fprintf (f, " only_called_at_startup");
1985 if (node->only_called_at_exit)
1986 fprintf (f, " only_called_at_exit");
1987 if (node->tm_clone)
1988 fprintf (f, " tm_clone");
1989
1990 fprintf (f, "\n");
1991
1992 if (node->thunk.thunk_p)
1993 {
1994 fprintf (f, " Thunk");
1995 if (node->thunk.alias)
1996 fprintf (f, " of %s (asm: %s)",
1997 lang_hooks.decl_printable_name (node->thunk.alias, 2),
1998 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
1999 fprintf (f, " fixed offset %i virtual value %i has "
2000 "virtual offset %i\n",
2001 (int)node->thunk.fixed_offset,
2002 (int)node->thunk.virtual_value,
2003 (int)node->thunk.virtual_offset_p);
2004 }
2005 if (node->alias && node->thunk.alias
2006 && DECL_P (node->thunk.alias))
2007 {
2008 fprintf (f, " Alias of %s",
2009 lang_hooks.decl_printable_name (node->thunk.alias, 2));
2010 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
2011 fprintf (f, " (asm: %s)",
2012 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2013 fprintf (f, "\n");
2014 }
2015
2016 fprintf (f, " Called by: ");
2017
2018 for (edge = node->callers; edge; edge = edge->next_caller)
2019 {
2020 fprintf (f, "%s/%i ", edge->caller->asm_name (),
2021 edge->caller->order);
2022 if (edge->count)
2023 fprintf (f, "(%" PRId64 "x) ",
2024 (int64_t)edge->count);
2025 if (edge->frequency)
2026 fprintf (f, "(%.2f per call) ",
2027 edge->frequency / (double)CGRAPH_FREQ_BASE);
2028 if (edge->speculative)
2029 fprintf (f, "(speculative) ");
2030 if (!edge->inline_failed)
2031 fprintf (f, "(inlined) ");
2032 if (edge->indirect_inlining_edge)
2033 fprintf (f, "(indirect_inlining) ");
2034 if (edge->can_throw_external)
2035 fprintf (f, "(can throw external) ");
2036 }
2037
2038 fprintf (f, "\n Calls: ");
2039 for (edge = node->callees; edge; edge = edge->next_callee)
2040 {
2041 fprintf (f, "%s/%i ", edge->callee->asm_name (),
2042 edge->callee->order);
2043 if (edge->speculative)
2044 fprintf (f, "(speculative) ");
2045 if (!edge->inline_failed)
2046 fprintf (f, "(inlined) ");
2047 if (edge->indirect_inlining_edge)
2048 fprintf (f, "(indirect_inlining) ");
2049 if (edge->count)
2050 fprintf (f, "(%" PRId64 "x) ",
2051 (int64_t)edge->count);
2052 if (edge->frequency)
2053 fprintf (f, "(%.2f per call) ",
2054 edge->frequency / (double)CGRAPH_FREQ_BASE);
2055 if (edge->can_throw_external)
2056 fprintf (f, "(can throw external) ");
2057 }
2058 fprintf (f, "\n");
2059
2060 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
2061 indirect_calls_count++;
2062 if (indirect_calls_count)
2063 fprintf (f, " Has %i outgoing edges for indirect calls.\n",
2064 indirect_calls_count);
2065 }
2066
2067
2068 /* Dump call graph node NODE to stderr. */
2069
2070 DEBUG_FUNCTION void
2071 debug_cgraph_node (struct cgraph_node *node)
2072 {
2073 dump_cgraph_node (stderr, node);
2074 }
2075
2076
2077 /* Dump the callgraph to file F. */
2078
2079 void
2080 dump_cgraph (FILE *f)
2081 {
2082 struct cgraph_node *node;
2083
2084 fprintf (f, "callgraph:\n\n");
2085 FOR_EACH_FUNCTION (node)
2086 dump_cgraph_node (f, node);
2087 }
2088
2089
2090 /* Dump the call graph to stderr. */
2091
2092 DEBUG_FUNCTION void
2093 debug_cgraph (void)
2094 {
2095 dump_cgraph (stderr);
2096 }
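
/* Usage note (informal): the two debug_* entry points above are intended
   to be called by hand from a debugger while GCC is stopped inside a
   pass, for example

     (gdb) call debug_cgraph ()
     (gdb) call debug_cgraph_node (node)

   Both write to stderr only, so they are safe to invoke at any point
   after the symbol table has been created.  */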
2097
2098 /* Return true when the DECL can possibly be inlined. */
2099 bool
2100 cgraph_function_possibly_inlined_p (tree decl)
2101 {
2102 if (!cgraph_global_info_ready)
2103 return !DECL_UNINLINABLE (decl);
2104 return DECL_POSSIBLY_INLINED (decl);
2105 }
2106
2107 /* NODE is no longer a nested function; update cgraph accordingly. */
2108 void
2109 cgraph_unnest_node (struct cgraph_node *node)
2110 {
2111 gcc_assert (node->origin);
2112 struct cgraph_node **node2 = &node->origin->nested;
2113
2114 while (*node2 != node)
2115 node2 = &(*node2)->next_nested;
2116 *node2 = node->next_nested;
2117 node->origin = NULL;
2118 }
2119
2120 /* Return function availability. See cgraph.h for description of individual
2121 return values. */
2122 enum availability
2123 cgraph_function_body_availability (struct cgraph_node *node)
2124 {
2125 enum availability avail;
2126 if (!node->analyzed)
2127 avail = AVAIL_NOT_AVAILABLE;
2128 else if (node->local.local)
2129 avail = AVAIL_LOCAL;
2130 else if (node->alias && node->weakref)
2131 cgraph_function_or_thunk_node (node, &avail);
2132 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (node->decl)))
2133 avail = AVAIL_OVERWRITABLE;
2134 else if (!node->externally_visible)
2135 avail = AVAIL_AVAILABLE;
2136 /* Inline functions are safe to be analyzed even if their symbol can
2137 be overwritten at runtime. It is not meaningful to enforce any sane
2138 behaviour on replacing an inline function with a different body. */
2139 else if (DECL_DECLARED_INLINE_P (node->decl))
2140 avail = AVAIL_AVAILABLE;
2141
2142 /* If the function can be overwritten, return OVERWRITABLE. Take
2143 care at least of two notable extensions - the COMDAT functions
2144 used to share template instantiations in C++ (this is symmetric
2145 to code cp_cannot_inline_tree_fn and probably shall be shared and
2146 the inlinability hooks completely eliminated).
2147
2148 ??? Does the C++ one definition rule allow us to always return
2149 AVAIL_AVAILABLE here? That would be good reason to preserve this
2150 bit. */
2151
2152 else if (decl_replaceable_p (node->decl)
2153 && !DECL_EXTERNAL (node->decl))
2154 avail = AVAIL_OVERWRITABLE;
2155 else avail = AVAIL_AVAILABLE;
2156
2157 return avail;
2158 }
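
/* A typical (illustrative) use of the availability lattice: an IPA
   analysis that wants to rely on a callee's body only when that body is
   guaranteed to be the one the linker will use might guard itself like
   this, where analyze_body is a hypothetical analysis routine:

     if (cgraph_function_body_availability (callee) >= AVAIL_AVAILABLE)
       analyze_body (callee);

   AVAIL_OVERWRITABLE bodies may still be inspected for optimization
   hints, but no hard conclusions should depend on them.  */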
2159
2160 /* Worker for cgraph_node_can_be_local_p. */
2161 static bool
2162 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2163 void *data ATTRIBUTE_UNUSED)
2164 {
2165 return !(!node->force_output
2166 && ((DECL_COMDAT (node->decl)
2167 && !node->forced_by_abi
2168 && !symtab_used_from_object_file_p (node)
2169 && !node->same_comdat_group)
2170 || !node->externally_visible));
2171 }
2172
2173 /* Return true if NODE can be made local for API change.
2174 Extern inline functions and C++ COMDAT functions can be made local
2175 at the expense of possible code size growth if function is used in multiple
2176 compilation units. */
2177 bool
2178 cgraph_node_can_be_local_p (struct cgraph_node *node)
2179 {
2180 return (!node->address_taken
2181 && !cgraph_for_node_and_aliases (node,
2182 cgraph_node_cannot_be_local_p_1,
2183 NULL, true));
2184 }
2185
2186 /* Call callback on NODE, thunks and aliases associated with NODE.
2187 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2188 skipped. */
2189
2190 bool
2191 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2192 bool (*callback) (struct cgraph_node *, void *),
2193 void *data,
2194 bool include_overwritable)
2195 {
2196 struct cgraph_edge *e;
2197 int i;
2198 struct ipa_ref *ref = NULL;
2199
2200 if (callback (node, data))
2201 return true;
2202 for (e = node->callers; e; e = e->next_caller)
2203 if (e->caller->thunk.thunk_p
2204 && (include_overwritable
2205 || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2206 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2207 include_overwritable))
2208 return true;
2209 for (i = 0; node->iterate_referring (i, ref); i++)
2210 if (ref->use == IPA_REF_ALIAS)
2211 {
2212 struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2213 if (include_overwritable
2214 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2215 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2216 include_overwritable))
2217 return true;
2218 }
2219 return false;
2220 }
2221
2222 /* Call callback on NODE and aliases associated with NODE.
2223 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2224 skipped. */
2225
2226 bool
2227 cgraph_for_node_and_aliases (struct cgraph_node *node,
2228 bool (*callback) (struct cgraph_node *, void *),
2229 void *data,
2230 bool include_overwritable)
2231 {
2232 int i;
2233 struct ipa_ref *ref = NULL;
2234
2235 if (callback (node, data))
2236 return true;
2237 for (i = 0; node->iterate_referring (i, ref); i++)
2238 if (ref->use == IPA_REF_ALIAS)
2239 {
2240 struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2241 if (include_overwritable
2242 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2243 if (cgraph_for_node_and_aliases (alias, callback, data,
2244 include_overwritable))
2245 return true;
2246 }
2247 return false;
2248 }
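
/* Sketch of the walker protocol used by the two functions above: the
   callback returns true to stop the walk early and false to continue.
   For example (count_one is hypothetical), counting a node together
   with its aliases could look like:

     static bool
     count_one (struct cgraph_node *n ATTRIBUTE_UNUSED, void *data)
     {
       (*(int *) data)++;
       return false;
     }

     int count = 0;
     cgraph_for_node_and_aliases (node, count_one, &count, true);  */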
2249
2250 /* Worker to bring NODE local. */
2251
2252 static bool
2253 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2254 {
2255 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2256 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2257 {
2258 symtab_make_decl_local (node->decl);
2259
2260 node->set_section (NULL);
2261 node->set_comdat_group (NULL);
2262 node->externally_visible = false;
2263 node->forced_by_abi = false;
2264 node->local.local = true;
2266 node->unique_name = (node->resolution == LDPR_PREVAILING_DEF_IRONLY
2267 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP);
2268 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2269 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2270 }
2271 return false;
2272 }
2273
2274 /* Bring NODE local. */
2275
2276 void
2277 cgraph_make_node_local (struct cgraph_node *node)
2278 {
2279 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2280 NULL, true);
2281 }
2282
2283 /* Worker to set nothrow flag. */
2284
2285 static bool
2286 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2287 {
2288 struct cgraph_edge *e;
2289
2290 TREE_NOTHROW (node->decl) = data != NULL;
2291
2292 if (data != NULL)
2293 for (e = node->callers; e; e = e->next_caller)
2294 e->can_throw_external = false;
2295 return false;
2296 }
2297
2298 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE,
2299 if any, to NOTHROW. */
2300
2301 void
2302 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2303 {
2304 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2305 (void *)(size_t)nothrow, false);
2306 }
2307
2308 /* Worker to set const flag. */
2309
2310 static bool
2311 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2312 {
2313 /* Static constructors and destructors without a side effect can be
2314 optimized out. */
2315 if (data && !((size_t)data & 2))
2316 {
2317 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2318 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2319 if (DECL_STATIC_DESTRUCTOR (node->decl))
2320 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2321 }
2322 TREE_READONLY (node->decl) = data != NULL;
2323 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2324 return false;
2325 }
2326
2327 /* Set TREE_READONLY on NODE's decl and on aliases of NODE,
2328 if any, to READONLY. */
2329
2330 void
2331 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2332 {
2333 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2334 (void *)(size_t)(readonly + (int)looping * 2),
2335 false);
2336 }
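
/* The DATA argument of the const/pure workers above packs two booleans
   into the pointer value: bit 0 carries the readonly/pure flag and bit 1
   the "looping" flag.  So, for instance,

     cgraph_set_const_flag (node, true, true);

   passes (void *) 3, which makes the worker set both TREE_READONLY and
   DECL_LOOPING_CONST_OR_PURE_P on the decl and on its thunks and
   non-overwritable aliases.  */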
2337
2338 /* Worker to set pure flag. */
2339
2340 static bool
2341 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2342 {
2343 /* Static constructors and destructors without a side effect can be
2344 optimized out. */
2345 if (data && !((size_t)data & 2))
2346 {
2347 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2348 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2349 if (DECL_STATIC_DESTRUCTOR (node->decl))
2350 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2351 }
2352 DECL_PURE_P (node->decl) = data != NULL;
2353 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2354 return false;
2355 }
2356
2357 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE,
2358 if any, to PURE. */
2359
2360 void
2361 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2362 {
2363 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2364 (void *)(size_t)(pure + (int)looping * 2),
2365 false);
2366 }
2367
2368 /* Return true when NODE cannot return or throw and thus
2369 it is safe to ignore its side effects for IPA analysis. */
2370
2371 bool
2372 cgraph_node_cannot_return (struct cgraph_node *node)
2373 {
2374 int flags = flags_from_decl_or_type (node->decl);
2375 if (!flag_exceptions)
2376 return (flags & ECF_NORETURN) != 0;
2377 else
2378 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2379 == (ECF_NORETURN | ECF_NOTHROW));
2380 }
2381
2382 /* Return true when a call of E cannot lead to return from the caller
2383 and thus it is safe to ignore its side effects for IPA analysis
2384 when computing side effects of the caller.
2385 FIXME: We could actually mark all edges that have no reaching
2386 path to the exit block or throw to get better results. */
2387 bool
2388 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2389 {
2390 if (cgraph_node_cannot_return (e->caller))
2391 return true;
2392 if (e->indirect_unknown_callee)
2393 {
2394 int flags = e->indirect_info->ecf_flags;
2395 if (!flag_exceptions)
2396 return (flags & ECF_NORETURN) != 0;
2397 else
2398 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2399 == (ECF_NORETURN | ECF_NOTHROW));
2400 }
2401 else
2402 return cgraph_node_cannot_return (e->callee);
2403 }
2404
2405 /* Return true when function NODE can be removed from callgraph
2406 if all direct calls are eliminated. */
2407
2408 bool
2409 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2410 {
2411 gcc_assert (!node->global.inlined_to);
2412 /* Extern inlines can always go, we will use the external definition. */
2413 if (DECL_EXTERNAL (node->decl))
2414 return true;
2415 /* When function is needed, we can not remove it. */
2416 if (node->force_output || node->used_from_other_partition)
2417 return false;
2418 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2419 || DECL_STATIC_DESTRUCTOR (node->decl))
2420 return false;
2421 /* Only COMDAT functions can be removed if externally visible. */
2422 if (node->externally_visible
2423 && (!DECL_COMDAT (node->decl)
2424 || node->forced_by_abi
2425 || symtab_used_from_object_file_p (node)))
2426 return false;
2427 return true;
2428 }
2429
2430 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2431
2432 static bool
2433 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2434 {
2435 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2436 }
2437
2438 /* Return true when function NODE and its aliases can be removed from callgraph
2439 if all direct calls are eliminated. */
2440
2441 bool
2442 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2443 {
2444 /* Extern inlines can always go, we will use the external definition. */
2445 if (DECL_EXTERNAL (node->decl))
2446 return true;
2447 if (node->address_taken)
2448 return false;
2449 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2450 }
2451
2452 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2453
2454 static bool
2455 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2456 {
2457 return symtab_used_from_object_file_p (node);
2458 }
2459
2460 /* Return true when function NODE can be expected to be removed
2461 from the program when direct calls in this compilation unit are removed.
2462 
2463 As a special case, COMDAT functions are
2464 cgraph_can_remove_if_no_direct_calls_p while they are not
2465 cgraph_only_called_directly_p (it is possible they are called from another
2466 unit).
2467 
2468 This function behaves as cgraph_only_called_directly_p because eliminating
2469 all uses of a COMDAT function does not necessarily make it disappear from
2470 the program unless we are compiling the whole program or doing LTO. In that
2471 case we know we win, since dynamic linking will not really discard the
2472 linkonce section. */
2473
2474 bool
2475 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2476 {
2477 gcc_assert (!node->global.inlined_to);
2478 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2479 return false;
2480 if (!in_lto_p && !flag_whole_program)
2481 return cgraph_only_called_directly_p (node);
2482 else
2483 {
2484 if (DECL_EXTERNAL (node->decl))
2485 return true;
2486 return cgraph_can_remove_if_no_direct_calls_p (node);
2487 }
2488 }
2489
2490
2491 /* Worker for cgraph_only_called_directly_p. */
2492
2493 static bool
2494 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2495 {
2496 return !cgraph_only_called_directly_or_aliased_p (node);
2497 }
2498
2499 /* Return true when function NODE and all its aliases are only called
2500 directly,
2501 i.e. it is not externally visible, its address is not taken and
2502 it is not used in any other non-standard way. */
2503
2504 bool
2505 cgraph_only_called_directly_p (struct cgraph_node *node)
2506 {
2507 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
2508 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
2509 NULL, true);
2510 }
2511
2512
2513 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2514
2515 static bool
2516 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
2517 {
2518 vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
2519 struct cgraph_edge *cs;
2520 enum availability avail;
2521 cgraph_function_or_thunk_node (node, &avail);
2522
2523 if (avail > AVAIL_OVERWRITABLE)
2524 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2525 if (!cs->indirect_inlining_edge)
2526 redirect_callers->safe_push (cs);
2527 return false;
2528 }
2529
2530 /* Collect all callers of NODE and its aliases that are known to lead to NODE
2531 (i.e. are not overwritable). */
2532
2533 vec<cgraph_edge_p>
2534 collect_callers_of_node (struct cgraph_node *node)
2535 {
2536 vec<cgraph_edge_p> redirect_callers = vNULL;
2537 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
2538 &redirect_callers, false);
2539 return redirect_callers;
2540 }
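
/* Illustrative use of the collected vector: the result is heap
   allocated, so a typical caller redirects the edges and then releases
   it.  Here new_node is hypothetical and stands for whatever replacement
   callee the caller has prepared:

     vec<cgraph_edge_p> callers = collect_callers_of_node (node);
     unsigned i;
     struct cgraph_edge *cs;
     FOR_EACH_VEC_ELT (callers, i, cs)
       cgraph_redirect_edge_callee (cs, new_node);
     callers.release ();  */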
2541
2542 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
2543
2544 static bool
2545 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
2546 {
2547 bool skipped_thunk = false;
2548 node = cgraph_function_or_thunk_node (node, NULL);
2549 node2 = cgraph_function_or_thunk_node (node2, NULL);
2550
2551 /* There are no virtual clones of thunks so check former_clone_of or if we
2552 might have skipped thunks because these adjustments are no longer
2553 necessary. */
2554 while (node->thunk.thunk_p)
2555 {
2556 if (node2->former_clone_of == node->decl)
2557 return true;
2558 if (!node->thunk.this_adjusting)
2559 return false;
2560 node = cgraph_function_or_thunk_node (node->callees->callee, NULL);
2561 skipped_thunk = true;
2562 }
2563
2564 if (skipped_thunk)
2565 {
2566 if (!node2->clone.args_to_skip
2567 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
2568 return false;
2569 if (node2->former_clone_of == node->decl)
2570 return true;
2571 else if (!node2->clone_of)
2572 return false;
2573 }
2574
2575 while (node != node2 && node2)
2576 node2 = node2->clone_of;
2577 return node2 != NULL;
2578 }
2579
2580 /* Verify edge E count and frequency. */
2581
2582 static bool
2583 verify_edge_count_and_frequency (struct cgraph_edge *e)
2584 {
2585 bool error_found = false;
2586 if (e->count < 0)
2587 {
2588 error ("caller edge count is negative");
2589 error_found = true;
2590 }
2591 if (e->frequency < 0)
2592 {
2593 error ("caller edge frequency is negative");
2594 error_found = true;
2595 }
2596 if (e->frequency > CGRAPH_FREQ_MAX)
2597 {
2598 error ("caller edge frequency is too large");
2599 error_found = true;
2600 }
2601 if (gimple_has_body_p (e->caller->decl)
2602 && !e->caller->global.inlined_to
2603 && !e->speculative
2604 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
2605 Remove this once edges are actually removed from the function at that time. */
2606 && (e->frequency
2607 || (inline_edge_summary_vec.exists ()
2608 && ((inline_edge_summary_vec.length () <= (unsigned) e->uid)
2609 || !inline_edge_summary (e)->predicate)))
2610 && (e->frequency
2611 != compute_call_stmt_bb_frequency (e->caller->decl,
2612 gimple_bb (e->call_stmt))))
2613 {
2614 error ("caller edge frequency %i does not match BB frequency %i",
2615 e->frequency,
2616 compute_call_stmt_bb_frequency (e->caller->decl,
2617 gimple_bb (e->call_stmt)));
2618 error_found = true;
2619 }
2620 return error_found;
2621 }
2622
2623 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2624 static void
2625 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
2626 {
2627 bool fndecl_was_null = false;
2628 /* debug_gimple_stmt needs correct cfun */
2629 if (cfun != this_cfun)
2630 set_cfun (this_cfun);
2631 /* ...and an actual current_function_decl */
2632 if (!current_function_decl)
2633 {
2634 current_function_decl = this_cfun->decl;
2635 fndecl_was_null = true;
2636 }
2637 debug_gimple_stmt (stmt);
2638 if (fndecl_was_null)
2639 current_function_decl = NULL;
2640 }
2641
2642 /* Verify that call graph edge E corresponds to DECL from the associated
2643 statement. Return true if the verification should fail. */
2644
2645 static bool
2646 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
2647 {
2648 struct cgraph_node *node;
2649
2650 if (!decl || e->callee->global.inlined_to)
2651 return false;
2652 if (cgraph_state == CGRAPH_LTO_STREAMING)
2653 return false;
2654 node = cgraph_get_node (decl);
2655
2656 /* We do not know if a node from a different partition is an alias or what it
2657 aliases and therefore cannot do the former_clone_of check reliably. When
2658 body_removed is set, we have lost all information about what was alias or
2659 thunk of and also cannot proceed. */
2660 if (!node
2661 || node->body_removed
2662 || node->in_other_partition
2663 || e->callee->in_other_partition)
2664 return false;
2665
2666 /* Optimizers can redirect unreachable calls or calls triggering undefined
2667 behaviour to builtin_unreachable. */
2668 if (DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
2669 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
2670 return false;
2671 node = cgraph_function_or_thunk_node (node, NULL);
2672
2673 if (e->callee->former_clone_of != node->decl
2674 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
2675 && !clone_of_p (node, e->callee))
2676 return true;
2677 else
2678 return false;
2679 }
2680
2681 /* Verify the consistency of cgraph node NODE. */
2682 DEBUG_FUNCTION void
2683 verify_cgraph_node (struct cgraph_node *node)
2684 {
2685 struct cgraph_edge *e;
2686 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
2687 basic_block this_block;
2688 gimple_stmt_iterator gsi;
2689 bool error_found = false;
2690
2691 if (seen_error ())
2692 return;
2693
2694 timevar_push (TV_CGRAPH_VERIFY);
2695 error_found |= verify_symtab_base (node);
2696 for (e = node->callees; e; e = e->next_callee)
2697 if (e->aux)
2698 {
2699 error ("aux field set for edge %s->%s",
2700 identifier_to_locale (e->caller->name ()),
2701 identifier_to_locale (e->callee->name ()));
2702 error_found = true;
2703 }
2704 if (node->count < 0)
2705 {
2706 error ("execution count is negative");
2707 error_found = true;
2708 }
2709 if (node->global.inlined_to && node->same_comdat_group)
2710 {
2711 error ("inline clone in same comdat group list");
2712 error_found = true;
2713 }
2714 if (!node->definition && !node->in_other_partition && node->local.local)
2715 {
2716 error ("local symbols must be defined");
2717 error_found = true;
2718 }
2719 if (node->global.inlined_to && node->externally_visible)
2720 {
2721 error ("externally visible inline clone");
2722 error_found = true;
2723 }
2724 if (node->global.inlined_to && node->address_taken)
2725 {
2726 error ("inline clone with address taken");
2727 error_found = true;
2728 }
2729 if (node->global.inlined_to && node->force_output)
2730 {
2731 error ("inline clone is forced to output");
2732 error_found = true;
2733 }
2734 for (e = node->indirect_calls; e; e = e->next_callee)
2735 {
2736 if (e->aux)
2737 {
2738 error ("aux field set for indirect edge from %s",
2739 identifier_to_locale (e->caller->name ()));
2740 error_found = true;
2741 }
2742 if (!e->indirect_unknown_callee
2743 || !e->indirect_info)
2744 {
2745 error ("An indirect edge from %s is not marked as indirect or has no "
2746 "associated indirect_info, the corresponding statement is: ",
2747 identifier_to_locale (e->caller->name ()));
2748 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2749 error_found = true;
2750 }
2751 }
2752 bool check_comdat = symtab_comdat_local_p (node);
2753 for (e = node->callers; e; e = e->next_caller)
2754 {
2755 if (verify_edge_count_and_frequency (e))
2756 error_found = true;
2757 if (check_comdat
2758 && !symtab_in_same_comdat_p (e->caller, node))
2759 {
2760 error ("comdat-local function called by %s outside its comdat",
2761 identifier_to_locale (e->caller->name ()));
2762 error_found = true;
2763 }
2764 if (!e->inline_failed)
2765 {
2766 if (node->global.inlined_to
2767 != (e->caller->global.inlined_to
2768 ? e->caller->global.inlined_to : e->caller))
2769 {
2770 error ("inlined_to pointer is wrong");
2771 error_found = true;
2772 }
2773 if (node->callers->next_caller)
2774 {
2775 error ("multiple inline callers");
2776 error_found = true;
2777 }
2778 }
2779 else
2780 if (node->global.inlined_to)
2781 {
2782 error ("inlined_to pointer set for noninline callers");
2783 error_found = true;
2784 }
2785 }
2786 for (e = node->indirect_calls; e; e = e->next_callee)
2787 if (verify_edge_count_and_frequency (e))
2788 error_found = true;
2789 if (!node->callers && node->global.inlined_to)
2790 {
2791 error ("inlined_to pointer is set but no predecessors found");
2792 error_found = true;
2793 }
2794 if (node->global.inlined_to == node)
2795 {
2796 error ("inlined_to pointer refers to itself");
2797 error_found = true;
2798 }
2799
2800 if (node->clone_of)
2801 {
2802 struct cgraph_node *n;
2803 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
2804 if (n == node)
2805 break;
2806 if (!n)
2807 {
2808 error ("node has wrong clone_of");
2809 error_found = true;
2810 }
2811 }
2812 if (node->clones)
2813 {
2814 struct cgraph_node *n;
2815 for (n = node->clones; n; n = n->next_sibling_clone)
2816 if (n->clone_of != node)
2817 break;
2818 if (n)
2819 {
2820 error ("node has wrong clone list");
2821 error_found = true;
2822 }
2823 }
2824 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
2825 {
2826 error ("node is in clone list but it is not clone");
2827 error_found = true;
2828 }
2829 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
2830 {
2831 error ("node has wrong prev_clone pointer");
2832 error_found = true;
2833 }
2834 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
2835 {
2836 error ("double linked list of clones corrupted");
2837 error_found = true;
2838 }
2839
2840 if (node->analyzed && node->alias)
2841 {
2842 bool ref_found = false;
2843 int i;
2844 struct ipa_ref *ref = NULL;
2845
2846 if (node->callees)
2847 {
2848 error ("Alias has call edges");
2849 error_found = true;
2850 }
2851 for (i = 0; node->iterate_reference (i, ref); i++)
2852 if (ref->use != IPA_REF_ALIAS)
2853 {
2854 error ("Alias has non-alias reference");
2855 error_found = true;
2856 }
2857 else if (ref_found)
2858 {
2859 error ("Alias has more than one alias reference");
2860 error_found = true;
2861 }
2862 else
2863 ref_found = true;
2864 if (!ref_found)
2865 {
2866 error ("Analyzed alias has no reference");
2867 error_found = true;
2868 }
2869 }
2870 if (node->analyzed && node->thunk.thunk_p)
2871 {
2872 if (!node->callees)
2873 {
2874 error ("No edge out of thunk node");
2875 error_found = true;
2876 }
2877 else if (node->callees->next_callee)
2878 {
2879 error ("More than one edge out of thunk node");
2880 error_found = true;
2881 }
2882 if (gimple_has_body_p (node->decl))
2883 {
2884 error ("Thunk is not supposed to have body");
2885 error_found = true;
2886 }
2887 }
2888 else if (node->analyzed && gimple_has_body_p (node->decl)
2889 && !TREE_ASM_WRITTEN (node->decl)
2890 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
2891 && !flag_wpa)
2892 {
2893 if (this_cfun->cfg)
2894 {
2895 pointer_set_t *stmts = pointer_set_create ();
2896 int i;
2897 struct ipa_ref *ref = NULL;
2898
2899 /* Reach the trees by walking over the CFG, and note the
2900 enclosing basic-blocks in the call edges. */
2901 FOR_EACH_BB_FN (this_block, this_cfun)
2902 {
2903 for (gsi = gsi_start_phis (this_block);
2904 !gsi_end_p (gsi); gsi_next (&gsi))
2905 pointer_set_insert (stmts, gsi_stmt (gsi));
2906 for (gsi = gsi_start_bb (this_block);
2907 !gsi_end_p (gsi);
2908 gsi_next (&gsi))
2909 {
2910 gimple stmt = gsi_stmt (gsi);
2911 pointer_set_insert (stmts, stmt);
2912 if (is_gimple_call (stmt))
2913 {
2914 struct cgraph_edge *e = cgraph_edge (node, stmt);
2915 tree decl = gimple_call_fndecl (stmt);
2916 if (e)
2917 {
2918 if (e->aux)
2919 {
2920 error ("shared call_stmt:");
2921 cgraph_debug_gimple_stmt (this_cfun, stmt);
2922 error_found = true;
2923 }
2924 if (!e->indirect_unknown_callee)
2925 {
2926 if (verify_edge_corresponds_to_fndecl (e, decl))
2927 {
2928 error ("edge points to wrong declaration:");
2929 debug_tree (e->callee->decl);
2930 fprintf (stderr, " Instead of:");
2931 debug_tree (decl);
2932 error_found = true;
2933 }
2934 }
2935 else if (decl)
2936 {
2937 error ("an indirect edge with unknown callee "
2938 "corresponding to a call_stmt with "
2939 "a known declaration:");
2940 error_found = true;
2941 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2942 }
2943 e->aux = (void *)1;
2944 }
2945 else if (decl)
2946 {
2947 error ("missing callgraph edge for call stmt:");
2948 cgraph_debug_gimple_stmt (this_cfun, stmt);
2949 error_found = true;
2950 }
2951 }
2952 }
2953 }
2954 for (i = 0;
2955 node->iterate_reference (i, ref); i++)
2956 if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
2957 {
2958 error ("reference to dead statement");
2959 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
2960 error_found = true;
2961 }
2962 pointer_set_destroy (stmts);
2963 }
2964 else
2965 /* No CFG available?! */
2966 gcc_unreachable ();
2967
2968 for (e = node->callees; e; e = e->next_callee)
2969 {
2970 if (!e->aux)
2971 {
2972 error ("edge %s->%s has no corresponding call_stmt",
2973 identifier_to_locale (e->caller->name ()),
2974 identifier_to_locale (e->callee->name ()));
2975 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2976 error_found = true;
2977 }
2978 e->aux = 0;
2979 }
2980 for (e = node->indirect_calls; e; e = e->next_callee)
2981 {
2982 if (!e->aux && !e->speculative)
2983 {
2984 error ("an indirect edge from %s has no corresponding call_stmt",
2985 identifier_to_locale (e->caller->name ()));
2986 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2987 error_found = true;
2988 }
2989 e->aux = 0;
2990 }
2991 }
2992 if (error_found)
2993 {
2994 dump_cgraph_node (stderr, node);
2995 internal_error ("verify_cgraph_node failed");
2996 }
2997 timevar_pop (TV_CGRAPH_VERIFY);
2998 }
2999
3000 /* Verify whole cgraph structure. */
3001 DEBUG_FUNCTION void
3002 verify_cgraph (void)
3003 {
3004 struct cgraph_node *node;
3005
3006 if (seen_error ())
3007 return;
3008
3009 FOR_EACH_FUNCTION (node)
3010 verify_cgraph_node (node);
3011 }
3012
3013 /* Given NODE, walk the alias chain to return the function NODE is an alias of.
3014 Walk through thunks, too.
3015 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
3016
3017 struct cgraph_node *
3018 cgraph_function_node (struct cgraph_node *node, enum availability *availability)
3019 {
3020 do
3021 {
3022 node = cgraph_function_or_thunk_node (node, availability);
3023 if (node->thunk.thunk_p)
3024 {
3025 node = node->callees->callee;
3026 if (availability)
3027 {
3028 enum availability a;
3029 a = cgraph_function_body_availability (node);
3030 if (a < *availability)
3031 *availability = a;
3032 }
3033 node = cgraph_function_or_thunk_node (node, availability);
3034 }
3035 } while (node && node->thunk.thunk_p);
3036 return node;
3037 }
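
/* Illustrative use of the availability threading above (target and avail
   are just local names):

     enum availability avail;
     struct cgraph_node *target = cgraph_function_node (node, &avail);
     if (target && avail >= AVAIL_AVAILABLE)
       fprintf (stderr, "body of %s can be relied upon\n", target->name ());

   AVAIL ends up as the weakest availability seen anywhere along the
   alias/thunk chain, so it is safe to use it as a bound for the final
   target.  */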
3038
3039 /* When doing LTO, read NODE's body from disk if it is not already present. */
3040
3041 bool
3042 cgraph_get_body (struct cgraph_node *node)
3043 {
3044 struct lto_file_decl_data *file_data;
3045 const char *data, *name;
3046 size_t len;
3047 tree decl = node->decl;
3048
3049 if (DECL_RESULT (decl))
3050 return false;
3051
3052 gcc_assert (in_lto_p);
3053
3054 file_data = node->lto_file_data;
3055 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3056
3057 /* We may have renamed the declaration, e.g., a static function. */
3058 name = lto_get_decl_name_mapping (file_data, name);
3059
3060 data = lto_get_section_data (file_data, LTO_section_function_body,
3061 name, &len);
3062 if (!data)
3063 {
3064 dump_cgraph_node (stderr, node);
3065 fatal_error ("%s: section %s is missing",
3066 file_data->file_name,
3067 name);
3068 }
3069
3070 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3071
3072 lto_input_function_body (file_data, node, data);
3073 lto_stats.num_function_bodies++;
3074 lto_free_section_data (file_data, LTO_section_function_body, name,
3075 data, len);
3076 lto_free_function_in_decl_state_for_node (node);
3077 return true;
3078 }
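
/* Rough sketch of the intended calling pattern during LTO: function
   bodies are streamed in lazily, so a pass that needs the gimple body of
   NODE typically does something like

     if (!gimple_has_body_p (node->decl) && node->lto_file_data)
       cgraph_get_body (node);

   after which DECL_STRUCT_FUNCTION (node->decl) is populated.  This is
   an assumption about typical usage, not a requirement imposed by the
   function itself.  */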
3079
3080 /* Verify that the types of the arguments match those of the function
3081 declaration. If we cannot verify this or there is a mismatch,
3082 return false. */
3083
3084 static bool
3085 gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
3086 {
3087 tree parms, p;
3088 unsigned int i, nargs;
3089
3090 /* Calls to internal functions always match their signature. */
3091 if (gimple_call_internal_p (stmt))
3092 return true;
3093
3094 nargs = gimple_call_num_args (stmt);
3095
3096 /* Get argument types for verification. */
3097 if (fndecl)
3098 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3099 else
3100 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3101
3102 /* Verify that the types of the arguments match those of the function
3103 declaration. If we cannot verify this or there is a mismatch,
3104 return false. */
3105 if (fndecl && DECL_ARGUMENTS (fndecl))
3106 {
3107 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3108 i < nargs;
3109 i++, p = DECL_CHAIN (p))
3110 {
3111 tree arg;
3112 /* We cannot distinguish a varargs function from the case
3113 of excess parameters, but deferring the inlining decision
3114 to the callee is still possible. */
3115 if (!p)
3116 break;
3117 arg = gimple_call_arg (stmt, i);
3118 if (p == error_mark_node
3119 || DECL_ARG_TYPE (p) == error_mark_node
3120 || arg == error_mark_node
3121 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3122 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3123 return false;
3124 }
3125 if (args_count_match && p)
3126 return false;
3127 }
3128 else if (parms)
3129 {
3130 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3131 {
3132 tree arg;
3133 /* If this is a varargs function, defer the inlining decision
3134 to the callee. */
3135 if (!p)
3136 break;
3137 arg = gimple_call_arg (stmt, i);
3138 if (TREE_VALUE (p) == error_mark_node
3139 || arg == error_mark_node
3140 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3141 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3142 && !fold_convertible_p (TREE_VALUE (p), arg)))
3143 return false;
3144 }
3145 }
3146 else
3147 {
3148 if (nargs != 0)
3149 return false;
3150 }
3151 return true;
3152 }
3153
3154 /* Verify if the type of the argument and lhs of CALL_STMT matches
3155 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3156 true, the arg count needs to be the same.
3157 If we cannot verify this or there is a mismatch, return false. */
3158
3159 bool
3160 gimple_check_call_matching_types (gimple call_stmt, tree callee,
3161 bool args_count_match)
3162 {
3163 tree lhs;
3164
3165 if ((DECL_RESULT (callee)
3166 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3167 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3168 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3169 TREE_TYPE (lhs))
3170 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3171 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3172 return false;
3173 return true;
3174 }
3175
3176 #include "gt-cgraph.h"