1 /* Callgraph handling code.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call graph is a data structure designed for inter-procedural optimization.
24 It represents a multi-graph where nodes are functions and edges are call sites. */
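/* For illustration only, a minimal sketch (not part of this file) of how
   client code typically walks the graph, assuming the FOR_EACH_DEFINED_FUNCTION
   iterator and the callee edge lists declared in cgraph.h:

       struct cgraph_node *node;
       struct cgraph_edge *e;

       FOR_EACH_DEFINED_FUNCTION (node)
         for (e = node->callees; e; e = e->next_callee)
           fprintf (stderr, "%s -> %s\n", node->name (), e->callee->name ());  */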
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "varasm.h"
32 #include "calls.h"
33 #include "print-tree.h"
34 #include "tree-inline.h"
35 #include "langhooks.h"
36 #include "hashtab.h"
37 #include "toplev.h"
38 #include "flags.h"
39 #include "debug.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "intl.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "tree-eh.h"
46 #include "gimple-expr.h"
47 #include "gimple.h"
48 #include "gimple-iterator.h"
49 #include "timevar.h"
50 #include "dumpfile.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssa.h"
55 #include "value-prof.h"
56 #include "except.h"
57 #include "diagnostic-core.h"
58 #include "rtl.h"
59 #include "ipa-utils.h"
60 #include "lto-streamer.h"
61 #include "ipa-inline.h"
62 #include "cfgloop.h"
63 #include "gimple-pretty-print.h"
64 #include "expr.h"
65 #include "tree-dfa.h"
66
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
69
70 static void cgraph_node_remove_callers (struct cgraph_node *node);
71 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
72 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
73
74 /* Queue of cgraph nodes scheduled to be lowered. */
75 symtab_node *x_cgraph_nodes_queue;
76 #define cgraph_nodes_queue ((struct cgraph_node *)x_cgraph_nodes_queue)
77
78 /* Number of nodes in existence. */
79 int cgraph_n_nodes;
80
81 /* Maximal uid used in cgraph nodes. */
82 int cgraph_max_uid;
83
84 /* Maximal uid used in cgraph edges. */
85 int cgraph_edge_max_uid;
86
87 /* Set when whole unit has been analyzed so we can access global info. */
88 bool cgraph_global_info_ready = false;
89
90 /* What state callgraph is in right now. */
91 enum cgraph_state cgraph_state = CGRAPH_STATE_PARSING;
92
93 /* Set when the cgraph is fully built and the basic flags are computed. */
94 bool cgraph_function_flags_ready = false;
95
96 /* List of hooks triggered on cgraph_edge events. */
97 struct cgraph_edge_hook_list {
98 cgraph_edge_hook hook;
99 void *data;
100 struct cgraph_edge_hook_list *next;
101 };
102
103 /* List of hooks triggered on cgraph_node events. */
104 struct cgraph_node_hook_list {
105 cgraph_node_hook hook;
106 void *data;
107 struct cgraph_node_hook_list *next;
108 };
109
110 /* List of hooks triggered on events involving two cgraph_edges. */
111 struct cgraph_2edge_hook_list {
112 cgraph_2edge_hook hook;
113 void *data;
114 struct cgraph_2edge_hook_list *next;
115 };
116
117 /* List of hooks triggered on events involving two cgraph_nodes. */
118 struct cgraph_2node_hook_list {
119 cgraph_2node_hook hook;
120 void *data;
121 struct cgraph_2node_hook_list *next;
122 };
123
124 /* List of hooks triggered when an edge is removed. */
125 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
126 /* List of hooks triggered when a node is removed. */
127 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
128 /* List of hooks triggered when an edge is duplicated. */
129 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
130 /* List of hooks triggered when a node is duplicated. */
131 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
132 /* List of hooks triggered when a function is inserted. */
133 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
134
135 /* Head of a linked list of unused (freed) call graph nodes.
136 Do not GTY((delete)) this list so UIDs get reliably recycled. */
137 static GTY(()) struct cgraph_node *free_nodes;
138 /* Head of a linked list of unused (freed) call graph edges.
139 Do not GTY((delete)) this list so UIDs get reliably recycled. */
140 static GTY(()) struct cgraph_edge *free_edges;
141
143 /* Did process_same_body_aliases run? */
143 bool cpp_implicit_aliases_done;
144
145 /* Map a cgraph_node to cgraph_function_version_info using this htab.
146 The cgraph_function_version_info has a THIS_NODE field that is the
147 corresponding cgraph_node. */
148
149 static GTY((param_is (struct cgraph_function_version_info))) htab_t
150 cgraph_fnver_htab = NULL;
151
152 /* Hash function for cgraph_fnver_htab. */
153 static hashval_t
154 cgraph_fnver_htab_hash (const void *ptr)
155 {
156 int uid = ((const struct cgraph_function_version_info *)ptr)->this_node->uid;
157 return (hashval_t)(uid);
158 }
159
160 /* eq function for cgraph_fnver_htab. */
161 static int
162 cgraph_fnver_htab_eq (const void *p1, const void *p2)
163 {
164 const struct cgraph_function_version_info *n1
165 = (const struct cgraph_function_version_info *)p1;
166 const struct cgraph_function_version_info *n2
167 = (const struct cgraph_function_version_info *)p2;
168
169 return n1->this_node->uid == n2->this_node->uid;
170 }
171
172 /* GC root keeping the most recently allocated version info node live until it is entered into cgraph_fnver_htab. */
173 static GTY(()) struct cgraph_function_version_info *
174 version_info_node = NULL;
175
176 /* Get the cgraph_function_version_info node corresponding to node. */
177 struct cgraph_function_version_info *
178 get_cgraph_node_version (struct cgraph_node *node)
179 {
180 struct cgraph_function_version_info *ret;
181 struct cgraph_function_version_info key;
182 key.this_node = node;
183
184 if (cgraph_fnver_htab == NULL)
185 return NULL;
186
187 ret = (struct cgraph_function_version_info *)
188 htab_find (cgraph_fnver_htab, &key);
189
190 return ret;
191 }
192
193 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
194 corresponding to cgraph_node NODE. */
195 struct cgraph_function_version_info *
196 insert_new_cgraph_node_version (struct cgraph_node *node)
197 {
198 void **slot;
199
200 version_info_node = NULL;
201 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
202 version_info_node->this_node = node;
203
204 if (cgraph_fnver_htab == NULL)
205 cgraph_fnver_htab = htab_create_ggc (2, cgraph_fnver_htab_hash,
206 cgraph_fnver_htab_eq, NULL);
207
208 slot = htab_find_slot (cgraph_fnver_htab, version_info_node, INSERT);
209 gcc_assert (slot != NULL);
210 *slot = version_info_node;
211 return version_info_node;
212 }
213
214 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
215 DECL is a duplicate declaration. */
216 void
217 delete_function_version (tree decl)
218 {
219 struct cgraph_node *decl_node = cgraph_get_node (decl);
220 struct cgraph_function_version_info *decl_v = NULL;
221
222 if (decl_node == NULL)
223 return;
224
225 decl_v = get_cgraph_node_version (decl_node);
226
227 if (decl_v == NULL)
228 return;
229
230 if (decl_v->prev != NULL)
231 decl_v->prev->next = decl_v->next;
232
233 if (decl_v->next != NULL)
234 decl_v->next->prev = decl_v->prev;
235
236 if (cgraph_fnver_htab != NULL)
237 htab_remove_elt (cgraph_fnver_htab, decl_v);
238
239 cgraph_remove_node (decl_node);
240 }
241
242 /* Record that DECL1 and DECL2 are semantically identical function
243 versions. */
244 void
245 record_function_versions (tree decl1, tree decl2)
246 {
247 struct cgraph_node *decl1_node = cgraph_get_create_node (decl1);
248 struct cgraph_node *decl2_node = cgraph_get_create_node (decl2);
249 struct cgraph_function_version_info *decl1_v = NULL;
250 struct cgraph_function_version_info *decl2_v = NULL;
251 struct cgraph_function_version_info *before;
252 struct cgraph_function_version_info *after;
253
254 gcc_assert (decl1_node != NULL && decl2_node != NULL);
255 decl1_v = get_cgraph_node_version (decl1_node);
256 decl2_v = get_cgraph_node_version (decl2_node);
257
258 if (decl1_v != NULL && decl2_v != NULL)
259 return;
260
261 if (decl1_v == NULL)
262 decl1_v = insert_new_cgraph_node_version (decl1_node);
263
264 if (decl2_v == NULL)
265 decl2_v = insert_new_cgraph_node_version (decl2_node);
266
267 /* Chain decl2_v and decl1_v. All semantically identical versions
268 will be chained together. */
269
270 before = decl1_v;
271 after = decl2_v;
272
273 while (before->next != NULL)
274 before = before->next;
275
276 while (after->prev != NULL)
277 after = after->prev;
278
279 before->next = after;
280 after->prev = before;
281 }
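
/* For illustration, a minimal sketch (not part of this file) of how the
   version chain built by record_function_versions can be inspected with the
   API defined above: look the node up, rewind to the head of the chain via
   PREV, then walk NEXT.  SOME_DECL stands for a FUNCTION_DECL that was
   previously recorded as a version:

       struct cgraph_function_version_info *v
         = get_cgraph_node_version (cgraph_get_node (some_decl));

       while (v && v->prev)
         v = v->prev;

       for (; v; v = v->next)
         fprintf (stderr, "%s\n", v->this_node->name ());  */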
282
283 /* Macros to access the next item in the list of free cgraph nodes and
284 edges. */
285 #define NEXT_FREE_NODE(NODE) cgraph ((NODE)->next)
286 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
287 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
288
289 /* Register HOOK to be called with DATA on each removed edge. */
290 struct cgraph_edge_hook_list *
291 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
292 {
293 struct cgraph_edge_hook_list *entry;
294 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
295
296 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
297 entry->hook = hook;
298 entry->data = data;
299 entry->next = NULL;
300 while (*ptr)
301 ptr = &(*ptr)->next;
302 *ptr = entry;
303 return entry;
304 }
305
306 /* Remove ENTRY from the list of hooks called on removing edges. */
307 void
308 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
309 {
310 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
311
312 while (*ptr != entry)
313 ptr = &(*ptr)->next;
314 *ptr = entry->next;
315 free (entry);
316 }
317
318 /* Call all edge removal hooks. */
319 static void
320 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
321 {
322 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
323 while (entry)
324 {
325 entry->hook (e, entry->data);
326 entry = entry->next;
327 }
328 }
329
330 /* Register HOOK to be called with DATA on each removed node. */
331 struct cgraph_node_hook_list *
332 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
333 {
334 struct cgraph_node_hook_list *entry;
335 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
336
337 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
338 entry->hook = hook;
339 entry->data = data;
340 entry->next = NULL;
341 while (*ptr)
342 ptr = &(*ptr)->next;
343 *ptr = entry;
344 return entry;
345 }
346
347 /* Remove ENTRY from the list of hooks called on removing nodes. */
348 void
349 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
350 {
351 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
352
353 while (*ptr != entry)
354 ptr = &(*ptr)->next;
355 *ptr = entry->next;
356 free (entry);
357 }
358
359 /* Call all node removal hooks. */
360 static void
361 cgraph_call_node_removal_hooks (struct cgraph_node *node)
362 {
363 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
364 while (entry)
365 {
366 entry->hook (node, entry->data);
367 entry = entry->next;
368 }
369 }
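
/* A minimal usage sketch (hypothetical pass-local code, not part of this
   file): a pass that keeps per-node summaries can register a removal hook so
   its summary is discarded whenever a node is removed.  The "my_" names are
   placeholders:

       static void
       my_pass_node_removal (struct cgraph_node *node, void *data)
       {
         (free the summary kept for NODE here)
       }

       static struct cgraph_node_hook_list *my_removal_hook_holder;

       my_removal_hook_holder
         = cgraph_add_node_removal_hook (&my_pass_node_removal, NULL);
       ...
       cgraph_remove_node_removal_hook (my_removal_hook_holder);  */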
370
371 /* Register HOOK to be called with DATA on each inserted node. */
372 struct cgraph_node_hook_list *
373 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
374 {
375 struct cgraph_node_hook_list *entry;
376 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
377
378 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
379 entry->hook = hook;
380 entry->data = data;
381 entry->next = NULL;
382 while (*ptr)
383 ptr = &(*ptr)->next;
384 *ptr = entry;
385 return entry;
386 }
387
388 /* Remove ENTRY from the list of hooks called on inserted nodes. */
389 void
390 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
391 {
392 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
393
394 while (*ptr != entry)
395 ptr = &(*ptr)->next;
396 *ptr = entry->next;
397 free (entry);
398 }
399
400 /* Call all node insertion hooks. */
401 void
402 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
403 {
404 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
405 while (entry)
406 {
407 entry->hook (node, entry->data);
408 entry = entry->next;
409 }
410 }
411
412 /* Register HOOK to be called with DATA on each duplicated edge. */
413 struct cgraph_2edge_hook_list *
414 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
415 {
416 struct cgraph_2edge_hook_list *entry;
417 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
418
419 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
420 entry->hook = hook;
421 entry->data = data;
422 entry->next = NULL;
423 while (*ptr)
424 ptr = &(*ptr)->next;
425 *ptr = entry;
426 return entry;
427 }
428
429 /* Remove ENTRY from the list of hooks called on duplicating edges. */
430 void
431 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
432 {
433 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
434
435 while (*ptr != entry)
436 ptr = &(*ptr)->next;
437 *ptr = entry->next;
438 free (entry);
439 }
440
441 /* Call all edge duplication hooks. */
442 void
443 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
444 struct cgraph_edge *cs2)
445 {
446 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
447 while (entry)
448 {
449 entry->hook (cs1, cs2, entry->data);
450 entry = entry->next;
451 }
452 }
453
454 /* Register HOOK to be called with DATA on each duplicated node. */
455 struct cgraph_2node_hook_list *
456 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
457 {
458 struct cgraph_2node_hook_list *entry;
459 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
460
461 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
462 entry->hook = hook;
463 entry->data = data;
464 entry->next = NULL;
465 while (*ptr)
466 ptr = &(*ptr)->next;
467 *ptr = entry;
468 return entry;
469 }
470
471 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
472 void
473 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
474 {
475 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
476
477 while (*ptr != entry)
478 ptr = &(*ptr)->next;
479 *ptr = entry->next;
480 free (entry);
481 }
482
483 /* Call all node duplication hooks. */
484 void
485 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
486 struct cgraph_node *node2)
487 {
488 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
489 while (entry)
490 {
491 entry->hook (node1, node2, entry->data);
492 entry = entry->next;
493 }
494 }
495
496 /* Allocate new callgraph node. */
497
498 static inline struct cgraph_node *
499 cgraph_allocate_node (void)
500 {
501 struct cgraph_node *node;
502
503 if (free_nodes)
504 {
505 node = free_nodes;
506 free_nodes = NEXT_FREE_NODE (node);
507 }
508 else
509 {
510 node = ggc_cleared_alloc<cgraph_node> ();
511 node->uid = cgraph_max_uid++;
512 }
513
514 return node;
515 }
516
517 /* Allocate new callgraph node and insert it into basic data structures. */
518
519 struct cgraph_node *
520 cgraph_create_empty_node (void)
521 {
522 struct cgraph_node *node = cgraph_allocate_node ();
523
524 node->type = SYMTAB_FUNCTION;
525 node->frequency = NODE_FREQUENCY_NORMAL;
526 node->count_materialization_scale = REG_BR_PROB_BASE;
527 cgraph_n_nodes++;
528 return node;
529 }
530
531 /* Return cgraph node assigned to DECL. Create new one when needed. */
532
533 struct cgraph_node *
534 cgraph_create_node (tree decl)
535 {
536 struct cgraph_node *node = cgraph_create_empty_node ();
537 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
538
539 node->decl = decl;
540 symtab_register_node (node);
541
542 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
543 {
544 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
545 node->next_nested = node->origin->nested;
546 node->origin->nested = node;
547 }
548 return node;
549 }
550
551 /* Try to find a call graph node for declaration DECL and if it does not exist
552 or if it corresponds to an inline clone, create a new one. */
553
554 struct cgraph_node *
555 cgraph_get_create_node (tree decl)
556 {
557 struct cgraph_node *first_clone = cgraph_get_node (decl);
558
559 if (first_clone && !first_clone->global.inlined_to)
560 return first_clone;
561
562 struct cgraph_node *node = cgraph_create_node (decl);
563 if (first_clone)
564 {
565 first_clone->clone_of = node;
566 node->clones = first_clone;
567 symtab_prevail_in_asm_name_hash (node);
568 node->decl->decl_with_vis.symtab_node = node;
569 if (dump_file)
570 fprintf (dump_file, "Introduced new external node "
571 "(%s/%i) and turned into root of the clone tree.\n",
572 xstrdup (node->name ()), node->order);
573 }
574 else if (dump_file)
575 fprintf (dump_file, "Introduced new external node "
576 "(%s/%i).\n", xstrdup (node->name ()),
577 node->order);
578 return node;
579 }
580
581 /* Mark ALIAS as an alias to TARGET. TARGET may be a FUNCTION_DECL or an
582 IDENTIFIER_NODE naming the symbol the alias refers to. Return the alias node. */
583
584 struct cgraph_node *
585 cgraph_create_function_alias (tree alias, tree target)
586 {
587 struct cgraph_node *alias_node;
588
589 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
590 || TREE_CODE (target) == IDENTIFIER_NODE);
591 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
592 alias_node = cgraph_get_create_node (alias);
593 gcc_assert (!alias_node->definition);
594 alias_node->alias_target = target;
595 alias_node->definition = true;
596 alias_node->alias = true;
597 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
598 alias_node->weakref = true;
599 return alias_node;
600 }
601
602 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
603 and NULL otherwise.
604 Same body aliases are output whenever the body of DECL is output,
605 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
606
607 struct cgraph_node *
608 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
609 {
610 struct cgraph_node *n;
611 #ifndef ASM_OUTPUT_DEF
612 /* If aliases aren't supported by the assembler, fail. */
613 return NULL;
614 #endif
615 /* Langhooks can create same body aliases of symbols not defined.
616 Those are useless. Drop them on the floor. */
617 if (cgraph_global_info_ready)
618 return NULL;
619
620 n = cgraph_create_function_alias (alias, decl);
621 n->cpp_implicit_alias = true;
622 if (cpp_implicit_aliases_done)
623 symtab_resolve_alias (n,
624 cgraph_get_node (decl));
625 return n;
626 }
627
628 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
629 aliases DECL, with adjustments made to the first parameter.
630 See comments in thunk_adjust for details on the parameters. */
631
632 struct cgraph_node *
633 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
634 tree alias, tree decl ATTRIBUTE_UNUSED,
635 bool this_adjusting,
636 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
637 tree virtual_offset,
638 tree real_alias)
639 {
640 struct cgraph_node *node;
641
642 node = cgraph_get_node (alias);
643 if (node)
644 cgraph_reset_node (node);
645 else
646 node = cgraph_create_node (alias);
647 gcc_checking_assert (!virtual_offset
648 || wi::eq_p (virtual_offset, virtual_value));
649 node->thunk.fixed_offset = fixed_offset;
650 node->thunk.this_adjusting = this_adjusting;
651 node->thunk.virtual_value = virtual_value;
652 node->thunk.virtual_offset_p = virtual_offset != NULL;
653 node->thunk.alias = real_alias;
654 node->thunk.thunk_p = true;
655 node->definition = true;
656
657 return node;
658 }
659
660 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
661 Return NULL if there's no such node. */
662
663 struct cgraph_node *
664 cgraph_node_for_asm (tree asmname)
665 {
666 /* We do not want to look at inline clones. */
667 for (symtab_node *node = symtab_node_for_asm (asmname);
668 node;
669 node = node->next_sharing_asm_name)
670 {
671 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
672 if (cn && !cn->global.inlined_to)
673 return cn;
674 }
675 return NULL;
676 }
677
678 /* Returns a hash value for X (which really is a cgraph_edge). */
679
680 static hashval_t
681 edge_hash (const void *x)
682 {
683 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
684 }
685
686 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
687
688 static int
689 edge_eq (const void *x, const void *y)
690 {
691 return ((const struct cgraph_edge *) x)->call_stmt == y;
692 }
693
694 /* Update the entry for call graph edge E in the call site hash of its caller. */
695
696 static inline void
697 cgraph_update_edge_in_call_site_hash (struct cgraph_edge *e)
698 {
699 void **slot;
700 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
701 e->call_stmt,
702 htab_hash_pointer (e->call_stmt),
703 INSERT);
704 *slot = e;
705 }
706
707 /* Add call graph edge E to call site hash of its caller. */
708
709 static inline void
710 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
711 {
712 void **slot;
713 /* There are two speculative edges for every statement (one direct,
714 one indirect); always hash the direct one. */
715 if (e->speculative && e->indirect_unknown_callee)
716 return;
717 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
718 e->call_stmt,
719 htab_hash_pointer (e->call_stmt),
720 INSERT);
721 if (*slot)
722 {
723 gcc_assert (((struct cgraph_edge *)*slot)->speculative);
724 if (e->callee)
725 *slot = e;
726 return;
727 }
728 gcc_assert (!*slot || e->speculative);
729 *slot = e;
730 }
731
732 /* Return the callgraph edge representing the GIMPLE_CALL statement
733 CALL_STMT. */
734
735 struct cgraph_edge *
736 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
737 {
738 struct cgraph_edge *e, *e2;
739 int n = 0;
740
741 if (node->call_site_hash)
742 return (struct cgraph_edge *)
743 htab_find_with_hash (node->call_site_hash, call_stmt,
744 htab_hash_pointer (call_stmt));
745
746 /* This loop may turn out to be a performance problem. In that case, adding
747 hash tables to call nodes with very many edges is probably the best
748 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
749 because we want to allow multiple cgraph nodes to represent
750 different clones of the same body before the body is actually cloned. */
751 for (e = node->callees; e; e = e->next_callee)
752 {
753 if (e->call_stmt == call_stmt)
754 break;
755 n++;
756 }
757
758 if (!e)
759 for (e = node->indirect_calls; e; e = e->next_callee)
760 {
761 if (e->call_stmt == call_stmt)
762 break;
763 n++;
764 }
765
766 if (n > 100)
767 {
768 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
769 for (e2 = node->callees; e2; e2 = e2->next_callee)
770 cgraph_add_edge_to_call_site_hash (e2);
771 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
772 cgraph_add_edge_to_call_site_hash (e2);
773 }
774
775 return e;
776 }
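
/* For illustration, a minimal sketch (not part of this file) of the typical
   lookup pattern, assuming CALL_STMT is a GIMPLE_CALL in the current
   function:

       struct cgraph_node *node = cgraph_get_node (current_function_decl);
       struct cgraph_edge *e = cgraph_edge (node, call_stmt);

       if (e && !e->indirect_unknown_callee)
         (e->callee is the known callee node)  */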
777
778
779 /* Change field call_stmt of edge E to NEW_STMT.
780 If UPDATE_SPECULATIVE and E is any component of speculative
781 edge, then update all components. */
782
783 void
784 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt,
785 bool update_speculative)
786 {
787 tree decl;
788
789 /* Speculative edges have three components; update all of them
790 when asked to. */
791 if (update_speculative && e->speculative)
792 {
793 struct cgraph_edge *direct, *indirect;
794 struct ipa_ref *ref;
795
796 cgraph_speculative_call_info (e, direct, indirect, ref);
797 cgraph_set_call_stmt (direct, new_stmt, false);
798 cgraph_set_call_stmt (indirect, new_stmt, false);
799 ref->stmt = new_stmt;
800 return;
801 }
802
803 /* Only direct speculative edges go to call_site_hash. */
804 if (e->caller->call_site_hash
805 && (!e->speculative || !e->indirect_unknown_callee))
806 {
807 htab_remove_elt_with_hash (e->caller->call_site_hash,
808 e->call_stmt,
809 htab_hash_pointer (e->call_stmt));
810 }
811
812 e->call_stmt = new_stmt;
813 if (e->indirect_unknown_callee
814 && (decl = gimple_call_fndecl (new_stmt)))
815 {
816 /* Constant propagation (and possibly also inlining?) can turn an
817 indirect call into a direct one. */
818 struct cgraph_node *new_callee = cgraph_get_node (decl);
819
820 gcc_checking_assert (new_callee);
821 e = cgraph_make_edge_direct (e, new_callee);
822 }
823
824 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
825 e->can_throw_external = stmt_can_throw_external (new_stmt);
826 pop_cfun ();
827 if (e->caller->call_site_hash)
828 cgraph_add_edge_to_call_site_hash (e);
829 }
830
831 /* Allocate a cgraph_edge structure and fill it with data according to the
832 parameters, of which only CALLEE can be NULL (when creating an indirect call
833 edge). */
834
835 static struct cgraph_edge *
836 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
837 gimple call_stmt, gcov_type count, int freq,
838 bool indir_unknown_callee)
839 {
840 struct cgraph_edge *edge;
841
842 /* LTO does not actually have access to the call_stmt since these
843 have not been loaded yet. */
844 if (call_stmt)
845 {
846 /* This is a rather expensive check possibly triggering
847 construction of call stmt hashtable. */
848 #ifdef ENABLE_CHECKING
849 struct cgraph_edge *e;
850 gcc_checking_assert (!(e=cgraph_edge (caller, call_stmt)) || e->speculative);
851 #endif
852
853 gcc_assert (is_gimple_call (call_stmt));
854 }
855
856 if (free_edges)
857 {
858 edge = free_edges;
859 free_edges = NEXT_FREE_EDGE (edge);
860 }
861 else
862 {
863 edge = ggc_alloc<struct cgraph_edge> ();
864 edge->uid = cgraph_edge_max_uid++;
865 }
866
867 edge->aux = NULL;
868 edge->caller = caller;
869 edge->callee = callee;
870 edge->prev_caller = NULL;
871 edge->next_caller = NULL;
872 edge->prev_callee = NULL;
873 edge->next_callee = NULL;
874 edge->lto_stmt_uid = 0;
875
876 edge->count = count;
877 gcc_assert (count >= 0);
878 edge->frequency = freq;
879 gcc_assert (freq >= 0);
880 gcc_assert (freq <= CGRAPH_FREQ_MAX);
881
882 edge->call_stmt = call_stmt;
883 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
884 edge->can_throw_external
885 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
886 pop_cfun ();
887 if (call_stmt
888 && callee && callee->decl
889 && !gimple_check_call_matching_types (call_stmt, callee->decl,
890 false))
891 edge->call_stmt_cannot_inline_p = true;
892 else
893 edge->call_stmt_cannot_inline_p = false;
894
895 edge->indirect_info = NULL;
896 edge->indirect_inlining_edge = 0;
897 edge->speculative = false;
898 edge->indirect_unknown_callee = indir_unknown_callee;
899 if (call_stmt && caller->call_site_hash)
900 cgraph_add_edge_to_call_site_hash (edge);
901
902 return edge;
903 }
904
905 /* Create edge from CALLER to CALLEE in the cgraph. */
906
907 struct cgraph_edge *
908 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
909 gimple call_stmt, gcov_type count, int freq)
910 {
911 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
912 count, freq, false);
913
914 initialize_inline_failed (edge);
915
916 edge->next_caller = callee->callers;
917 if (callee->callers)
918 callee->callers->prev_caller = edge;
919 edge->next_callee = caller->callees;
920 if (caller->callees)
921 caller->callees->prev_callee = edge;
922 caller->callees = edge;
923 callee->callers = edge;
924
925 return edge;
926 }
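
/* For illustration, a minimal sketch (not part of this file) of creating a
   direct edge for a newly emitted call statement, using the same profile
   helpers used elsewhere in this file.  CALLER, CALL_STMT and CALLEE_DECL
   are placeholders for the caller node, the GIMPLE_CALL statement and the
   callee FUNCTION_DECL:

       basic_block bb = gimple_bb (call_stmt);
       struct cgraph_edge *e
         = cgraph_create_edge (caller, cgraph_get_create_node (callee_decl),
                               call_stmt, bb->count,
                               compute_call_stmt_bb_frequency (caller->decl,
                                                               bb));  */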
927
928 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
929
930 struct cgraph_indirect_call_info *
931 cgraph_allocate_init_indirect_info (void)
932 {
933 struct cgraph_indirect_call_info *ii;
934
935 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
936 ii->param_index = -1;
937 return ii;
938 }
939
940 /* Create an indirect edge with a yet-undetermined callee where the call
941 statement destination is a formal parameter of the caller with index
942 PARAM_INDEX. */
943
944 struct cgraph_edge *
945 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
946 int ecf_flags,
947 gcov_type count, int freq)
948 {
949 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
950 count, freq, true);
951 tree target;
952
953 initialize_inline_failed (edge);
954
955 edge->indirect_info = cgraph_allocate_init_indirect_info ();
956 edge->indirect_info->ecf_flags = ecf_flags;
957
958 /* Record polymorphic call info. */
959 if (call_stmt
960 && (target = gimple_call_fn (call_stmt))
961 && virtual_method_call_p (target))
962 {
963 tree otr_type;
964 HOST_WIDE_INT otr_token;
965 ipa_polymorphic_call_context context;
966
967 get_polymorphic_call_info (caller->decl,
968 target,
969 &otr_type, &otr_token,
970 &context);
971
972 /* Only record types can have virtual calls. */
973 gcc_assert (TREE_CODE (otr_type) == RECORD_TYPE);
974 edge->indirect_info->polymorphic = true;
975 edge->indirect_info->param_index = -1;
976 edge->indirect_info->otr_token = otr_token;
977 edge->indirect_info->otr_type = otr_type;
978 edge->indirect_info->outer_type = context.outer_type;
979 edge->indirect_info->offset = context.offset;
980 edge->indirect_info->maybe_in_construction
981 = context.maybe_in_construction;
982 edge->indirect_info->maybe_derived_type = context.maybe_derived_type;
983 }
984
985 edge->next_callee = caller->indirect_calls;
986 if (caller->indirect_calls)
987 caller->indirect_calls->prev_callee = edge;
988 caller->indirect_calls = edge;
989
990 return edge;
991 }
992
993 /* Remove the edge E from the list of the callers of the callee. */
994
995 static inline void
996 cgraph_edge_remove_callee (struct cgraph_edge *e)
997 {
998 gcc_assert (!e->indirect_unknown_callee);
999 if (e->prev_caller)
1000 e->prev_caller->next_caller = e->next_caller;
1001 if (e->next_caller)
1002 e->next_caller->prev_caller = e->prev_caller;
1003 if (!e->prev_caller)
1004 e->callee->callers = e->next_caller;
1005 }
1006
1007 /* Remove the edge E from the list of the callees of the caller. */
1008
1009 static inline void
1010 cgraph_edge_remove_caller (struct cgraph_edge *e)
1011 {
1012 if (e->prev_callee)
1013 e->prev_callee->next_callee = e->next_callee;
1014 if (e->next_callee)
1015 e->next_callee->prev_callee = e->prev_callee;
1016 if (!e->prev_callee)
1017 {
1018 if (e->indirect_unknown_callee)
1019 e->caller->indirect_calls = e->next_callee;
1020 else
1021 e->caller->callees = e->next_callee;
1022 }
1023 if (e->caller->call_site_hash)
1024 htab_remove_elt_with_hash (e->caller->call_site_hash,
1025 e->call_stmt,
1026 htab_hash_pointer (e->call_stmt));
1027 }
1028
1029 /* Put the edge onto the free list. */
1030
1031 static void
1032 cgraph_free_edge (struct cgraph_edge *e)
1033 {
1034 int uid = e->uid;
1035
1036 if (e->indirect_info)
1037 ggc_free (e->indirect_info);
1038
1039 /* Clear out the edge so we do not dangle pointers. */
1040 memset (e, 0, sizeof (*e));
1041 e->uid = uid;
1042 NEXT_FREE_EDGE (e) = free_edges;
1043 free_edges = e;
1044 }
1045
1046 /* Remove the edge E in the cgraph. */
1047
1048 void
1049 cgraph_remove_edge (struct cgraph_edge *e)
1050 {
1051 /* Call all edge removal hooks. */
1052 cgraph_call_edge_removal_hooks (e);
1053
1054 if (!e->indirect_unknown_callee)
1055 /* Remove from callers list of the callee. */
1056 cgraph_edge_remove_callee (e);
1057
1058 /* Remove from callees list of the callers. */
1059 cgraph_edge_remove_caller (e);
1060
1061 /* Put the edge onto the free list. */
1062 cgraph_free_edge (e);
1063 }
1064
1065 /* Set callee of call graph edge E and add it to the corresponding set of
1066 callers. */
1067
1068 static void
1069 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1070 {
1071 e->prev_caller = NULL;
1072 if (n->callers)
1073 n->callers->prev_caller = e;
1074 e->next_caller = n->callers;
1075 n->callers = e;
1076 e->callee = n;
1077 }
1078
1079 /* Turn edge E into speculative call calling N2. Update
1080 the profile so the direct call is taken COUNT times
1081 with FREQUENCY.
1082
1083 At clone materialization time, the indirect call E will
1084 be expanded as:
1085
1086 if (call_dest == N2)
1087 n2 ();
1088 else
1089 call call_dest
1090
1091 At this time the function just creates the direct call,
1092 the reference representing the if conditional, and attaches
1093 them all to the original indirect call statement.
1094
1095 Return the direct edge created. */
1096
1097 struct cgraph_edge *
1098 cgraph_turn_edge_to_speculative (struct cgraph_edge *e,
1099 struct cgraph_node *n2,
1100 gcov_type direct_count,
1101 int direct_frequency)
1102 {
1103 struct cgraph_node *n = e->caller;
1104 struct ipa_ref *ref = NULL;
1105 struct cgraph_edge *e2;
1106
1107 if (dump_file)
1108 {
1109 fprintf (dump_file, "Indirect call -> speculative call"
1110 " %s/%i => %s/%i\n",
1111 xstrdup (n->name ()), n->order,
1112 xstrdup (n2->name ()), n2->order);
1113 }
1114 e->speculative = true;
1115 e2 = cgraph_create_edge (n, n2, e->call_stmt, direct_count, direct_frequency);
1116 initialize_inline_failed (e2);
1117 e2->speculative = true;
1118 if (TREE_NOTHROW (n2->decl))
1119 e2->can_throw_external = false;
1120 else
1121 e2->can_throw_external = e->can_throw_external;
1122 e2->lto_stmt_uid = e->lto_stmt_uid;
1123 e->count -= e2->count;
1124 e->frequency -= e2->frequency;
1125 cgraph_call_edge_duplication_hooks (e, e2);
1126 ref = n->add_reference (n2, IPA_REF_ADDR, e->call_stmt);
1127 ref->lto_stmt_uid = e->lto_stmt_uid;
1128 ref->speculative = e->speculative;
1129 cgraph_mark_address_taken_node (n2);
1130 return e2;
1131 }
1132
1133 /* A speculative call consists of three components:
1134 1) an indirect edge representing the original call
1135 2) a direct edge representing the new call
1136 3) an ADDR_EXPR reference representing the speculative check.
1137 All three components are attached to a single statement (the indirect
1138 call) and if one of them exists, all of them must exist.
1139
1140 Given speculative call edge E, return all three components.
1141 */
1142
1143 void
1144 cgraph_speculative_call_info (struct cgraph_edge *e,
1145 struct cgraph_edge *&direct,
1146 struct cgraph_edge *&indirect,
1147 struct ipa_ref *&reference)
1148 {
1149 struct ipa_ref *ref;
1150 int i;
1151 struct cgraph_edge *e2;
1152
1153 if (!e->indirect_unknown_callee)
1154 for (e2 = e->caller->indirect_calls;
1155 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1156 e2 = e2->next_callee)
1157 ;
1158 else
1159 {
1160 e2 = e;
1161 /* We can take advantage of the call stmt hash. */
1162 if (e2->call_stmt)
1163 {
1164 e = cgraph_edge (e->caller, e2->call_stmt);
1165 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1166 }
1167 else
1168 for (e = e->caller->callees;
1169 e2->call_stmt != e->call_stmt
1170 || e2->lto_stmt_uid != e->lto_stmt_uid;
1171 e = e->next_callee)
1172 ;
1173 }
1174 gcc_assert (e->speculative && e2->speculative);
1175 direct = e;
1176 indirect = e2;
1177
1178 reference = NULL;
1179 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1180 if (ref->speculative
1181 && ((ref->stmt && ref->stmt == e->call_stmt)
1182 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1183 {
1184 reference = ref;
1185 break;
1186 }
1187
1188 /* A speculative edge always consists of all three components: the direct edge,
1189 the indirect edge and the reference. */
1190
1191 gcc_assert (e && e2 && ref);
1192 }
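
/* For illustration, a minimal sketch (not part of this file) of retrieving
   all three components of a speculative call, given any one of its edges E:

       struct cgraph_edge *direct, *indirect;
       struct ipa_ref *ref;

       cgraph_speculative_call_info (e, direct, indirect, ref);

   After the call, DIRECT, INDIRECT and REF are all non-NULL.  */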
1193
1194 /* Redirect callee of E to N. The function does not update underlying
1195 call expression. */
1196
1197 void
1198 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1199 {
1200 /* Remove from callers list of the current callee. */
1201 cgraph_edge_remove_callee (e);
1202
1203 /* Insert to callers list of the new callee. */
1204 cgraph_set_edge_callee (e, n);
1205 }
1206
1207 /* Speculative call EDGE turned out to be a direct call to CALLEE_DECL.
1208 Remove the speculative call sequence and return the edge representing the call.
1209 It is up to the caller to redirect the call as appropriate. */
1210
1211 struct cgraph_edge *
1212 cgraph_resolve_speculation (struct cgraph_edge *edge, tree callee_decl)
1213 {
1214 struct cgraph_edge *e2;
1215 struct ipa_ref *ref;
1216
1217 gcc_assert (edge->speculative);
1218 cgraph_speculative_call_info (edge, e2, edge, ref);
1219 if (!callee_decl
1220 || !symtab_semantically_equivalent_p (ref->referred,
1221 symtab_get_node (callee_decl)))
1222 {
1223 if (dump_file)
1224 {
1225 if (callee_decl)
1226 {
1227 fprintf (dump_file, "Speculative indirect call %s/%i => %s/%i has "
1228 "turned out to have contradicting known target ",
1229 xstrdup (edge->caller->name ()), edge->caller->order,
1230 xstrdup (e2->callee->name ()), e2->callee->order);
1231 print_generic_expr (dump_file, callee_decl, 0);
1232 fprintf (dump_file, "\n");
1233 }
1234 else
1235 {
1236 fprintf (dump_file, "Removing speculative call %s/%i => %s/%i\n",
1237 xstrdup (edge->caller->name ()), edge->caller->order,
1238 xstrdup (e2->callee->name ()), e2->callee->order);
1239 }
1240 }
1241 }
1242 else
1243 {
1244 struct cgraph_edge *tmp = edge;
1245 if (dump_file)
1246 fprintf (dump_file, "Speculative call turned into direct call.\n");
1247 edge = e2;
1248 e2 = tmp;
1249 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1250 in the functions inlined through it. */
1251 }
1252 edge->count += e2->count;
1253 edge->frequency += e2->frequency;
1254 if (edge->frequency > CGRAPH_FREQ_MAX)
1255 edge->frequency = CGRAPH_FREQ_MAX;
1256 edge->speculative = false;
1257 e2->speculative = false;
1258 ref->remove_reference ();
1259 if (e2->indirect_unknown_callee || e2->inline_failed)
1260 cgraph_remove_edge (e2);
1261 else
1262 cgraph_remove_node_and_inline_clones (e2->callee, NULL);
1263 if (edge->caller->call_site_hash)
1264 cgraph_update_edge_in_call_site_hash (edge);
1265 return edge;
1266 }
1267
1268 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1269 CALLEE. Return the resulting edge; if EDGE was a speculative call that is
1270 resolved successfully, this is the pre-existing direct edge. */
1271
1272 struct cgraph_edge *
1273 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1274 {
1275 gcc_assert (edge->indirect_unknown_callee);
1276
1277 /* If we are redirecting speculative call, make it non-speculative. */
1278 if (edge->indirect_unknown_callee && edge->speculative)
1279 {
1280 edge = cgraph_resolve_speculation (edge, callee->decl);
1281
1282 /* On successful speculation just return the pre-existing direct edge. */
1283 if (!edge->indirect_unknown_callee)
1284 return edge;
1285 }
1286
1287 edge->indirect_unknown_callee = 0;
1288 ggc_free (edge->indirect_info);
1289 edge->indirect_info = NULL;
1290
1291 /* Get the edge out of the indirect edge list. */
1292 if (edge->prev_callee)
1293 edge->prev_callee->next_callee = edge->next_callee;
1294 if (edge->next_callee)
1295 edge->next_callee->prev_callee = edge->prev_callee;
1296 if (!edge->prev_callee)
1297 edge->caller->indirect_calls = edge->next_callee;
1298
1299 /* Put it into the normal callee list */
1300 edge->prev_callee = NULL;
1301 edge->next_callee = edge->caller->callees;
1302 if (edge->caller->callees)
1303 edge->caller->callees->prev_callee = edge;
1304 edge->caller->callees = edge;
1305
1306 /* Insert to callers list of the new callee. */
1307 cgraph_set_edge_callee (edge, callee);
1308
1309 if (edge->call_stmt)
1310 edge->call_stmt_cannot_inline_p
1311 = !gimple_check_call_matching_types (edge->call_stmt, callee->decl,
1312 false);
1313
1314 /* We need to re-determine the inlining status of the edge. */
1315 initialize_inline_failed (edge);
1316 return edge;
1317 }
1318
1319 /* If necessary, change the function declaration in the call statement
1320 associated with E so that it corresponds to the edge callee. */
1321
1322 gimple
1323 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
1324 {
1325 tree decl = gimple_call_fndecl (e->call_stmt);
1326 tree lhs = gimple_call_lhs (e->call_stmt);
1327 gimple new_stmt;
1328 gimple_stmt_iterator gsi;
1329 #ifdef ENABLE_CHECKING
1330 struct cgraph_node *node;
1331 #endif
1332
1333 if (e->speculative)
1334 {
1335 struct cgraph_edge *e2;
1336 gimple new_stmt;
1337 struct ipa_ref *ref;
1338
1339 cgraph_speculative_call_info (e, e, e2, ref);
1340 /* If there already is a direct call (i.e. as a result of the inliner's
1341 substitution), forget about speculating. */
1342 if (decl)
1343 e = cgraph_resolve_speculation (e, decl);
1344 /* If types do not match, speculation was likely wrong.
1345 The direct edge was possibly redirected to a clone with a different
1346 signature. We did not update the call statement yet, so compare it
1347 with the reference that still points to the proper type. */
1348 else if (!gimple_check_call_matching_types (e->call_stmt,
1349 ref->referred->decl,
1350 true))
1351 {
1352 if (dump_file)
1353 fprintf (dump_file, "Not expanding speculative call of %s/%i -> %s/%i\n"
1354 "Type mismatch.\n",
1355 xstrdup (e->caller->name ()),
1356 e->caller->order,
1357 xstrdup (e->callee->name ()),
1358 e->callee->order);
1359 e = cgraph_resolve_speculation (e, NULL);
1360 /* We are producing the final function body and will throw away the
1361 callgraph edges really soon. Reset the counts/frequencies to
1362 keep verifier happy in the case of roundoff errors. */
1363 e->count = gimple_bb (e->call_stmt)->count;
1364 e->frequency = compute_call_stmt_bb_frequency
1365 (e->caller->decl, gimple_bb (e->call_stmt));
1366 }
1367 /* Expand speculation into GIMPLE code. */
1368 else
1369 {
1370 if (dump_file)
1371 fprintf (dump_file,
1372 "Expanding speculative call of %s/%i -> %s/%i count:"
1373 "%"PRId64"\n",
1374 xstrdup (e->caller->name ()),
1375 e->caller->order,
1376 xstrdup (e->callee->name ()),
1377 e->callee->order,
1378 (int64_t)e->count);
1379 gcc_assert (e2->speculative);
1380 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1381 new_stmt = gimple_ic (e->call_stmt, cgraph (ref->referred),
1382 e->count || e2->count
1383 ? RDIV (e->count * REG_BR_PROB_BASE,
1384 e->count + e2->count)
1385 : e->frequency || e2->frequency
1386 ? RDIV (e->frequency * REG_BR_PROB_BASE,
1387 e->frequency + e2->frequency)
1388 : REG_BR_PROB_BASE / 2,
1389 e->count, e->count + e2->count);
1390 e->speculative = false;
1391 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt,
1392 new_stmt, false);
1393 e->frequency = compute_call_stmt_bb_frequency
1394 (e->caller->decl, gimple_bb (e->call_stmt));
1395 e2->frequency = compute_call_stmt_bb_frequency
1396 (e2->caller->decl, gimple_bb (e2->call_stmt));
1397 e2->speculative = false;
1398 ref->speculative = false;
1399 ref->stmt = NULL;
1400 /* Only the direct edge of a speculative pair is kept in the call site hash;
1401 update the hash so the original statement now maps to E2. */
1402 if (e->caller->call_site_hash)
1403 cgraph_update_edge_in_call_site_hash (e2);
1404 pop_cfun ();
1405 /* Continue redirecting E to proper target. */
1406 }
1407 }
1408
1409 if (e->indirect_unknown_callee
1410 || decl == e->callee->decl)
1411 return e->call_stmt;
1412
1413 #ifdef ENABLE_CHECKING
1414 if (decl)
1415 {
1416 node = cgraph_get_node (decl);
1417 gcc_assert (!node || !node->clone.combined_args_to_skip);
1418 }
1419 #endif
1420
1421 if (cgraph_dump_file)
1422 {
1423 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
1424 xstrdup (e->caller->name ()), e->caller->order,
1425 xstrdup (e->callee->name ()), e->callee->order);
1426 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1427 if (e->callee->clone.combined_args_to_skip)
1428 {
1429 fprintf (cgraph_dump_file, " combined args to skip: ");
1430 dump_bitmap (cgraph_dump_file,
1431 e->callee->clone.combined_args_to_skip);
1432 }
1433 }
1434
1435 if (e->callee->clone.combined_args_to_skip)
1436 {
1437 int lp_nr;
1438
1439 new_stmt
1440 = gimple_call_copy_skip_args (e->call_stmt,
1441 e->callee->clone.combined_args_to_skip);
1442 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1443 gimple_call_set_fntype (new_stmt, gimple_call_fntype (e->call_stmt));
1444
1445 if (gimple_vdef (new_stmt)
1446 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1447 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1448
1449 gsi = gsi_for_stmt (e->call_stmt);
1450 gsi_replace (&gsi, new_stmt, false);
1451 /* We need to defer cleaning EH info on the new statement to
1452 fixup-cfg. We may not have dominator information at this point
1453 and thus would end up with unreachable blocks and have no way
1454 to communicate that we need to run CFG cleanup then. */
1455 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1456 if (lp_nr != 0)
1457 {
1458 remove_stmt_from_eh_lp (e->call_stmt);
1459 add_stmt_to_eh_lp (new_stmt, lp_nr);
1460 }
1461 }
1462 else
1463 {
1464 new_stmt = e->call_stmt;
1465 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1466 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1467 }
1468
1469 /* If the call becomes noreturn, remove the lhs. */
1470 if (lhs && (gimple_call_flags (new_stmt) & ECF_NORETURN))
1471 {
1472 if (TREE_CODE (lhs) == SSA_NAME)
1473 {
1474 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1475 TREE_TYPE (lhs), NULL);
1476 var = get_or_create_ssa_default_def
1477 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1478 gimple set_stmt = gimple_build_assign (lhs, var);
1479 gsi = gsi_for_stmt (new_stmt);
1480 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1481 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1482 }
1483 gimple_call_set_lhs (new_stmt, NULL_TREE);
1484 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1485 }
1486
1487 /* If new callee has no static chain, remove it. */
1488 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1489 {
1490 gimple_call_set_chain (new_stmt, NULL);
1491 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1492 }
1493
1494 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt, false);
1495
1496 if (cgraph_dump_file)
1497 {
1498 fprintf (cgraph_dump_file, " updated to:");
1499 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
1500 }
1501 return new_stmt;
1502 }
1503
1504 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1505 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1506 of OLD_STMT if it was previously call statement.
1507 If NEW_STMT is NULL, the call has been dropped without any
1508 replacement. */
1509
1510 static void
1511 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1512 gimple old_stmt, tree old_call,
1513 gimple new_stmt)
1514 {
1515 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1516 ? gimple_call_fndecl (new_stmt) : 0;
1517
1518 /* If neither the old nor the new statement is a direct call, there is nothing to update. */
1519 if (!new_call && !old_call)
1520 return;
1521 /* See if we turned an indirect call into a direct call or folded a call to one
1522 builtin into a call to a different builtin. */
1523 if (old_call != new_call)
1524 {
1525 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1526 struct cgraph_edge *ne = NULL;
1527 gcov_type count;
1528 int frequency;
1529
1530 if (e)
1531 {
1532 /* See if the edge is already there and has the correct callee. It
1533 might be so because indirect inlining has already updated
1534 it. We also might have cloned and redirected the edge. */
1535 if (new_call && e->callee)
1536 {
1537 struct cgraph_node *callee = e->callee;
1538 while (callee)
1539 {
1540 if (callee->decl == new_call
1541 || callee->former_clone_of == new_call)
1542 {
1543 cgraph_set_call_stmt (e, new_stmt);
1544 return;
1545 }
1546 callee = callee->clone_of;
1547 }
1548 }
1549
1550 /* Otherwise remove the edge and create a new one; we can't simply redirect
1551 it since the function has changed, so the inline plan and other information
1552 attached to the edge are invalid. */
1553 count = e->count;
1554 frequency = e->frequency;
1555 if (e->indirect_unknown_callee || e->inline_failed)
1556 cgraph_remove_edge (e);
1557 else
1558 cgraph_remove_node_and_inline_clones (e->callee, NULL);
1559 }
1560 else if (new_call)
1561 {
1562 /* We are seeing new direct call; compute profile info based on BB. */
1563 basic_block bb = gimple_bb (new_stmt);
1564 count = bb->count;
1565 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1566 bb);
1567 }
1568
1569 if (new_call)
1570 {
1571 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1572 new_stmt, count, frequency);
1573 gcc_assert (ne->inline_failed);
1574 }
1575 }
1576 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1577 else if (old_stmt != new_stmt)
1578 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1579 }
1580
1581 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1582 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1583 of OLD_STMT before it was updated (updating can happen in place). */
1584
1585 void
1586 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1587 {
1588 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1589 struct cgraph_node *node;
1590
1591 gcc_checking_assert (orig);
1592 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1593 if (orig->clones)
1594 for (node = orig->clones; node != orig;)
1595 {
1596 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1597 if (node->clones)
1598 node = node->clones;
1599 else if (node->next_sibling_clone)
1600 node = node->next_sibling_clone;
1601 else
1602 {
1603 while (node != orig && !node->next_sibling_clone)
1604 node = node->clone_of;
1605 if (node != orig)
1606 node = node->next_sibling_clone;
1607 }
1608 }
1609 }
1610
1611
1612 /* Remove all callees from the node. */
1613
1614 void
1615 cgraph_node_remove_callees (struct cgraph_node *node)
1616 {
1617 struct cgraph_edge *e, *f;
1618
1619 /* It is sufficient to remove the edges from the lists of callers of
1620 the callees. The callee list of the node can be zapped with one
1621 assignment. */
1622 for (e = node->callees; e; e = f)
1623 {
1624 f = e->next_callee;
1625 cgraph_call_edge_removal_hooks (e);
1626 if (!e->indirect_unknown_callee)
1627 cgraph_edge_remove_callee (e);
1628 cgraph_free_edge (e);
1629 }
1630 for (e = node->indirect_calls; e; e = f)
1631 {
1632 f = e->next_callee;
1633 cgraph_call_edge_removal_hooks (e);
1634 if (!e->indirect_unknown_callee)
1635 cgraph_edge_remove_callee (e);
1636 cgraph_free_edge (e);
1637 }
1638 node->indirect_calls = NULL;
1639 node->callees = NULL;
1640 if (node->call_site_hash)
1641 {
1642 htab_delete (node->call_site_hash);
1643 node->call_site_hash = NULL;
1644 }
1645 }
1646
1647 /* Remove all callers from the node. */
1648
1649 static void
1650 cgraph_node_remove_callers (struct cgraph_node *node)
1651 {
1652 struct cgraph_edge *e, *f;
1653
1654 /* It is sufficient to remove the edges from the lists of callees of
1655 the callers. The caller list of the node can be zapped with one
1656 assignment. */
1657 for (e = node->callers; e; e = f)
1658 {
1659 f = e->next_caller;
1660 cgraph_call_edge_removal_hooks (e);
1661 cgraph_edge_remove_caller (e);
1662 cgraph_free_edge (e);
1663 }
1664 node->callers = NULL;
1665 }
1666
1667 /* Helper function for cgraph_release_function_body and free_lang_data.
1668 It releases body from function DECL without having to inspect its
1669 possibly non-existent symtab node. */
1670
1671 void
1672 release_function_body (tree decl)
1673 {
1674 if (DECL_STRUCT_FUNCTION (decl))
1675 {
1676 push_cfun (DECL_STRUCT_FUNCTION (decl));
1677 if (cfun->cfg
1678 && current_loops)
1679 {
1680 cfun->curr_properties &= ~PROP_loops;
1681 loop_optimizer_finalize ();
1682 }
1683 if (cfun->gimple_df)
1684 {
1685 delete_tree_ssa ();
1686 delete_tree_cfg_annotations ();
1687 cfun->eh = NULL;
1688 }
1689 if (cfun->cfg)
1690 {
1691 gcc_assert (!dom_info_available_p (CDI_DOMINATORS));
1692 gcc_assert (!dom_info_available_p (CDI_POST_DOMINATORS));
1693 clear_edges ();
1694 cfun->cfg = NULL;
1695 }
1696 if (cfun->value_histograms)
1697 free_histograms ();
1698 pop_cfun ();
1699 gimple_set_body (decl, NULL);
1700 /* The struct function hangs onto a lot of data that would leak if we did not
1701 remove all pointers to it. */
1702 ggc_free (DECL_STRUCT_FUNCTION (decl));
1703 DECL_STRUCT_FUNCTION (decl) = NULL;
1704 }
1705 DECL_SAVED_TREE (decl) = NULL;
1706 }
1707
1708 /* Release memory used to represent body of function NODE.
1709 Use this only for functions that are released before being translated to
1710 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1711 are free'd in final.c via free_after_compilation(). */
1712
1713 void
1714 cgraph_release_function_body (struct cgraph_node *node)
1715 {
1716 node->ipa_transforms_to_apply.release ();
1717 if (!node->used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
1718 {
1719 DECL_RESULT (node->decl) = NULL;
1720 DECL_ARGUMENTS (node->decl) = NULL;
1721 }
1722 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1723 of its associated function declaration because it's
1724 needed to emit debug info later. */
1725 if (!node->used_as_abstract_origin && DECL_INITIAL (node->decl))
1726 DECL_INITIAL (node->decl) = error_mark_node;
1727 release_function_body (node->decl);
1728 if (node->lto_file_data)
1729 lto_free_function_in_decl_state_for_node (node);
1730 }
1731
1732 /* Remove the node from cgraph. */
1733
1734 void
1735 cgraph_remove_node (struct cgraph_node *node)
1736 {
1737 struct cgraph_node *n;
1738 int uid = node->uid;
1739
1740 cgraph_call_node_removal_hooks (node);
1741 cgraph_node_remove_callers (node);
1742 cgraph_node_remove_callees (node);
1743 node->ipa_transforms_to_apply.release ();
1744
1745 /* Incremental inlining accesses removed nodes stored in the postorder list.
1746 */
1747 node->force_output = false;
1748 node->forced_by_abi = false;
1749 for (n = node->nested; n; n = n->next_nested)
1750 n->origin = NULL;
1751 node->nested = NULL;
1752 if (node->origin)
1753 {
1754 struct cgraph_node **node2 = &node->origin->nested;
1755
1756 while (*node2 != node)
1757 node2 = &(*node2)->next_nested;
1758 *node2 = node->next_nested;
1759 }
1760 symtab_unregister_node (node);
1761 if (node->prev_sibling_clone)
1762 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1763 else if (node->clone_of)
1764 node->clone_of->clones = node->next_sibling_clone;
1765 if (node->next_sibling_clone)
1766 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1767 if (node->clones)
1768 {
1769 struct cgraph_node *n, *next;
1770
1771 if (node->clone_of)
1772 {
1773 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1774 n->clone_of = node->clone_of;
1775 n->clone_of = node->clone_of;
1776 n->next_sibling_clone = node->clone_of->clones;
1777 if (node->clone_of->clones)
1778 node->clone_of->clones->prev_sibling_clone = n;
1779 node->clone_of->clones = node->clones;
1780 }
1781 else
1782 {
1783 /* We are removing node with clones. This makes clones inconsistent,
1784 but assume they will be removed subsequently and just keep clone
1785 tree intact. This can happen in unreachable function removal since
1786 we remove unreachable functions in random order, not by bottom-up
1787 walk of clone trees. */
1788 for (n = node->clones; n; n = next)
1789 {
1790 next = n->next_sibling_clone;
1791 n->next_sibling_clone = NULL;
1792 n->prev_sibling_clone = NULL;
1793 n->clone_of = NULL;
1794 }
1795 }
1796 }
1797
1798 /* While all the clones are removed after being processed, the function
1799 itself is kept in the cgraph even after it is compiled. Check whether
1800 we are done with this body and reclaim it proactively if this is the case.
1801 */
1802 if (cgraph_state != CGRAPH_LTO_STREAMING)
1803 {
1804 n = cgraph_get_node (node->decl);
1805 if (!n
1806 || (!n->clones && !n->clone_of && !n->global.inlined_to
1807 && (cgraph_global_info_ready
1808 && (TREE_ASM_WRITTEN (n->decl)
1809 || DECL_EXTERNAL (n->decl)
1810 || !n->analyzed
1811 || (!flag_wpa && n->in_other_partition)))))
1812 cgraph_release_function_body (node);
1813 }
1814
1815 node->decl = NULL;
1816 if (node->call_site_hash)
1817 {
1818 htab_delete (node->call_site_hash);
1819 node->call_site_hash = NULL;
1820 }
1821 cgraph_n_nodes--;
1822
1823 /* Clear out the node to NULL all pointers and add the node to the free
1824 list. */
1825 memset (node, 0, sizeof (*node));
1826 node->type = SYMTAB_FUNCTION;
1827 node->uid = uid;
1828 SET_NEXT_FREE_NODE (node, free_nodes);
1829 free_nodes = node;
1830 }
1831
1832 /* Likewise indicate that a node has its address taken. */
1833
1834 void
1835 cgraph_mark_address_taken_node (struct cgraph_node *node)
1836 {
1837 /* Indirect inlining can figure out that all uses of the address are
1838 inlined. */
1839 if (node->global.inlined_to)
1840 {
1841 gcc_assert (cfun->after_inlining);
1842 gcc_assert (node->callers->indirect_inlining_edge);
1843 return;
1844 }
1845 /* FIXME: the address_taken flag is used both as a shortcut for testing
1846 whether an IPA_REF_ADDR reference exists (and thus it should be set on the
1847 node representing the alias we take the address of) and as a test whether
1848 the address of the object was taken (and thus it should be set on the node
1849 the alias is referring to). We should remove the first use and then remove
1850 the following set. */
1851 node->address_taken = 1;
1852 node = cgraph_function_or_thunk_node (node, NULL);
1853 node->address_taken = 1;
1854 }
1855
1856 /* Return local info for the compiled function. */
1857
1858 struct cgraph_local_info *
1859 cgraph_local_info (tree decl)
1860 {
1861 struct cgraph_node *node;
1862
1863 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1864 node = cgraph_get_node (decl);
1865 if (!node)
1866 return NULL;
1867 return &node->local;
1868 }
1869
1870 /* Return global info for the compiled function. */
1871
1872 struct cgraph_global_info *
1873 cgraph_global_info (tree decl)
1874 {
1875 struct cgraph_node *node;
1876
1877 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1878 node = cgraph_get_node (decl);
1879 if (!node)
1880 return NULL;
1881 return &node->global;
1882 }
1883
1884 /* Return RTL info for the compiled function. */
1885
1886 struct cgraph_rtl_info *
1887 cgraph_rtl_info (tree decl)
1888 {
1889 struct cgraph_node *node;
1890
1891 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1892 node = cgraph_get_node (decl);
1893 if (!node
1894 || (decl != current_function_decl
1895 && !TREE_ASM_WRITTEN (node->decl)))
1896 return NULL;
1897 return &node->rtl;
1898 }
1899
1900 /* Return a string describing the failure REASON. */
1901
1902 const char*
1903 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1904 {
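/* Build the table of diagnostic strings by expanding each DEFCIFCODE
   entry in cif-code.def to its message; the same file is included again
   in cgraph_inline_failed_type with a different DEFCIFCODE definition to
   build the parallel table of failure types.  */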
1905 #undef DEFCIFCODE
1906 #define DEFCIFCODE(code, type, string) string,
1907
1908 static const char *cif_string_table[CIF_N_REASONS] = {
1909 #include "cif-code.def"
1910 };
1911
1912 /* Signedness of an enum type is implementation defined, so cast it
1913 to unsigned before testing. */
1914 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1915 return cif_string_table[reason];
1916 }
1917
1918 /* Return a type describing the failure REASON. */
1919
1920 cgraph_inline_failed_type_t
1921 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1922 {
1923 #undef DEFCIFCODE
1924 #define DEFCIFCODE(code, type, string) type,
1925
1926 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1927 #include "cif-code.def"
1928 };
1929
1930 /* Signedness of an enum type is implementation defined, so cast it
1931 to unsigned before testing. */
1932 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1933 return cif_type_table[reason];
1934 }
1935
1936 /* Names used to print out the availability enum. */
1937 const char * const cgraph_availability_names[] =
1938 {"unset", "not_available", "overwritable", "available", "local"};
1939
1940
1941 /* Dump call graph node NODE to file F. */
1942
1943 void
1944 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1945 {
1946 struct cgraph_edge *edge;
1947 int indirect_calls_count = 0;
1948
1949 dump_symtab_base (f, node);
1950
1951 if (node->global.inlined_to)
1952 fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
1953 xstrdup (node->name ()),
1954 node->order,
1955 xstrdup (node->global.inlined_to->name ()),
1956 node->global.inlined_to->order);
1957 if (node->clone_of)
1958 fprintf (f, " Clone of %s/%i\n",
1959 node->clone_of->asm_name (),
1960 node->clone_of->order);
1961 if (cgraph_function_flags_ready)
1962 fprintf (f, " Availability: %s\n",
1963 cgraph_availability_names [cgraph_function_body_availability (node)]);
1964
1965 if (node->profile_id)
1966 fprintf (f, " Profile id: %i\n",
1967 node->profile_id);
1968 fprintf (f, " First run: %i\n", node->tp_first_run);
1969 fprintf (f, " Function flags:");
1970 if (node->count)
1971 fprintf (f, " executed %"PRId64"x",
1972 (int64_t)node->count);
1973 if (node->origin)
1974 fprintf (f, " nested in: %s", node->origin->asm_name ());
1975 if (gimple_has_body_p (node->decl))
1976 fprintf (f, " body");
1977 if (node->process)
1978 fprintf (f, " process");
1979 if (node->local.local)
1980 fprintf (f, " local");
1981 if (node->local.redefined_extern_inline)
1982 fprintf (f, " redefined_extern_inline");
1983 if (node->only_called_at_startup)
1984 fprintf (f, " only_called_at_startup");
1985 if (node->only_called_at_exit)
1986 fprintf (f, " only_called_at_exit");
1987 if (node->tm_clone)
1988 fprintf (f, " tm_clone");
1989 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1990 fprintf (f, " static_constructor (priority:%i)", node->get_init_priority ());
1991 if (DECL_STATIC_DESTRUCTOR (node->decl))
1992 fprintf (f, " static_destructor (priority:%i)", node->get_fini_priority ());
1993
1994 fprintf (f, "\n");
1995
1996 if (node->thunk.thunk_p)
1997 {
1998 fprintf (f, " Thunk");
1999 if (node->thunk.alias)
2000 fprintf (f, " of %s (asm: %s)",
2001 lang_hooks.decl_printable_name (node->thunk.alias, 2),
2002 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2003 fprintf (f, " fixed offset %i virtual value %i has "
2004 "virtual offset %i\n",
2005 (int)node->thunk.fixed_offset,
2006 (int)node->thunk.virtual_value,
2007 (int)node->thunk.virtual_offset_p);
2008 }
2009 if (node->alias && node->thunk.alias
2010 && DECL_P (node->thunk.alias))
2011 {
2012 fprintf (f, " Alias of %s",
2013 lang_hooks.decl_printable_name (node->thunk.alias, 2));
2014 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
2015 fprintf (f, " (asm: %s)",
2016 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
2017 fprintf (f, "\n");
2018 }
2019
2020 fprintf (f, " Called by: ");
2021
2022 for (edge = node->callers; edge; edge = edge->next_caller)
2023 {
2024 fprintf (f, "%s/%i ", edge->caller->asm_name (),
2025 edge->caller->order);
2026 if (edge->count)
2027 fprintf (f, "(%"PRId64"x) ",
2028 (int64_t)edge->count);
2029 if (edge->frequency)
2030 fprintf (f, "(%.2f per call) ",
2031 edge->frequency / (double)CGRAPH_FREQ_BASE);
2032 if (edge->speculative)
2033 fprintf (f, "(speculative) ");
2034 if (!edge->inline_failed)
2035 fprintf (f, "(inlined) ");
2036 if (edge->indirect_inlining_edge)
2037 fprintf (f, "(indirect_inlining) ");
2038 if (edge->can_throw_external)
2039 fprintf (f, "(can throw external) ");
2040 }
2041
2042 fprintf (f, "\n Calls: ");
2043 for (edge = node->callees; edge; edge = edge->next_callee)
2044 {
2045 fprintf (f, "%s/%i ", edge->callee->asm_name (),
2046 edge->callee->order);
2047 if (edge->speculative)
2048 fprintf (f, "(speculative) ");
2049 if (!edge->inline_failed)
2050 fprintf (f, "(inlined) ");
2051 if (edge->indirect_inlining_edge)
2052 fprintf (f, "(indirect_inlining) ");
2053 if (edge->count)
2054 fprintf (f, "(%"PRId64"x) ",
2055 (int64_t)edge->count);
2056 if (edge->frequency)
2057 fprintf (f, "(%.2f per call) ",
2058 edge->frequency / (double)CGRAPH_FREQ_BASE);
2059 if (edge->can_throw_external)
2060 fprintf (f, "(can throw external) ");
2061 }
2062 fprintf (f, "\n");
2063
2064 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
2065 indirect_calls_count++;
2066 if (indirect_calls_count)
2067 fprintf (f, " Has %i outgoing edges for indirect calls.\n",
2068 indirect_calls_count);
2069 }
2070
2071
2072 /* Dump call graph node NODE to stderr. */
2073
2074 DEBUG_FUNCTION void
2075 debug_cgraph_node (struct cgraph_node *node)
2076 {
2077 dump_cgraph_node (stderr, node);
2078 }
2079
2080
2081 /* Dump the callgraph to file F. */
2082
2083 void
2084 dump_cgraph (FILE *f)
2085 {
2086 struct cgraph_node *node;
2087
2088 fprintf (f, "callgraph:\n\n");
2089 FOR_EACH_FUNCTION (node)
2090 dump_cgraph_node (f, node);
2091 }
2092
2093
2094 /* Dump the call graph to stderr. */
2095
2096 DEBUG_FUNCTION void
2097 debug_cgraph (void)
2098 {
2099 dump_cgraph (stderr);
2100 }
2101
2102 /* Return true when the DECL can possibly be inlined. */
2103 bool
2104 cgraph_function_possibly_inlined_p (tree decl)
2105 {
2106 if (!cgraph_global_info_ready)
2107 return !DECL_UNINLINABLE (decl);
2108 return DECL_POSSIBLY_INLINED (decl);
2109 }
2110
2111 /* NODE is no longer a nested function; update cgraph accordingly. */
2112 void
2113 cgraph_unnest_node (struct cgraph_node *node)
2114 {
2115 gcc_assert (node->origin);
2116 struct cgraph_node **node2 = &node->origin->nested;
2117
2118 while (*node2 != node)
2119 node2 = &(*node2)->next_nested;
2120 *node2 = node->next_nested;
2121 node->origin = NULL;
2122 }
2123
2124 /* Return function availability. See cgraph.h for description of individual
2125 return values. */
2126 enum availability
2127 cgraph_function_body_availability (struct cgraph_node *node)
2128 {
2129 enum availability avail;
2130 if (!node->analyzed)
2131 avail = AVAIL_NOT_AVAILABLE;
2132 else if (node->local.local)
2133 avail = AVAIL_LOCAL;
2134 else if (node->alias && node->weakref)
2135 cgraph_function_or_thunk_node (node, &avail);
2136 else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (node->decl)))
2137 avail = AVAIL_OVERWRITABLE;
2138 else if (!node->externally_visible)
2139 avail = AVAIL_AVAILABLE;
2140 /* Inline functions are safe to be analyzed even if their symbol can
2141 be overwritten at runtime. It is not meaningful to enforce any sane
2142 behaviour on replacing an inline function by a different body. */
2143 else if (DECL_DECLARED_INLINE_P (node->decl))
2144 avail = AVAIL_AVAILABLE;
2145
2146 /* If the function can be overwritten, return OVERWRITABLE. Take
2147 care of at least two notable extensions - the COMDAT functions
2148 used to share template instantiations in C++ (this is symmetric
2149 to the code in cp_cannot_inline_tree_fn and probably should be shared,
2150 with the inlinability hooks completely eliminated).
2151
2152 ??? Does the C++ one definition rule allow us to always return
2153 AVAIL_AVAILABLE here? That would be good reason to preserve this
2154 bit. */
2155
2156 else if (decl_replaceable_p (node->decl)
2157 && !DECL_EXTERNAL (node->decl))
2158 avail = AVAIL_OVERWRITABLE;
2159 else avail = AVAIL_AVAILABLE;
2160
2161 return avail;
2162 }
2163
2164 /* Worker for cgraph_node_can_be_local_p. */
2165 static bool
2166 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2167 void *data ATTRIBUTE_UNUSED)
2168 {
2169 return !(!node->force_output
2170 && ((DECL_COMDAT (node->decl)
2171 && !node->forced_by_abi
2172 && !symtab_used_from_object_file_p (node)
2173 && !node->same_comdat_group)
2174 || !node->externally_visible));
2175 }
2176
2177 /* Return true if NODE can be made local for API change.
2178 Extern inline functions and C++ COMDAT functions can be made local
2179 at the expense of possible code size growth if the function is used in
2180 multiple compilation units. */
2181 bool
2182 cgraph_node_can_be_local_p (struct cgraph_node *node)
2183 {
2184 return (!node->address_taken
2185 && !cgraph_for_node_and_aliases (node,
2186 cgraph_node_cannot_be_local_p_1,
2187 NULL, true));
2188 }
2189
2190 /* Call CALLBACK on NODE, and on the thunks and aliases associated with NODE.
2191 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2192 skipped. */
2193
2194 bool
2195 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2196 bool (*callback) (struct cgraph_node *, void *),
2197 void *data,
2198 bool include_overwritable)
2199 {
2200 struct cgraph_edge *e;
2201 int i;
2202 struct ipa_ref *ref = NULL;
2203
2204 if (callback (node, data))
2205 return true;
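/* Thunks appear among the callers of the function they wrap, so recurse
   into every caller that is a thunk.  */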
2206 for (e = node->callers; e; e = e->next_caller)
2207 if (e->caller->thunk.thunk_p
2208 && (include_overwritable
2209 || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2210 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2211 include_overwritable))
2212 return true;
2213 for (i = 0; node->iterate_referring (i, ref); i++)
2214 if (ref->use == IPA_REF_ALIAS)
2215 {
2216 struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2217 if (include_overwritable
2218 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2219 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2220 include_overwritable))
2221 return true;
2222 }
2223 return false;
2224 }
2225
2226 /* Call CALLBACK on NODE and on the aliases associated with NODE.
2227 When INCLUDE_OVERWRITABLE is false, overwritable aliases are
2228 skipped. */
2229
2230 bool
2231 cgraph_for_node_and_aliases (struct cgraph_node *node,
2232 bool (*callback) (struct cgraph_node *, void *),
2233 void *data,
2234 bool include_overwritable)
2235 {
2236 int i;
2237 struct ipa_ref *ref = NULL;
2238
2239 if (callback (node, data))
2240 return true;
2241 for (i = 0; node->iterate_referring (i, ref); i++)
2242 if (ref->use == IPA_REF_ALIAS)
2243 {
2244 struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2245 if (include_overwritable
2246 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2247 if (cgraph_for_node_and_aliases (alias, callback, data,
2248 include_overwritable))
2249 return true;
2250 }
2251 return false;
2252 }
2253
2254 /* Worker to make NODE local. */
2255
2256 static bool
2257 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2258 {
2259 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2260 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2261 {
2262 symtab_make_decl_local (node->decl);
2263
2264 node->set_section (NULL);
2265 node->set_comdat_group (NULL);
2266 node->externally_visible = false;
2267 node->forced_by_abi = false;
2268 node->local.local = true;
2270 node->unique_name = (node->resolution == LDPR_PREVAILING_DEF_IRONLY
2271 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP);
2272 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2273 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2274 }
2275 return false;
2276 }
2277
2278 /* Make NODE local. */
2279
2280 void
2281 cgraph_make_node_local (struct cgraph_node *node)
2282 {
2283 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2284 NULL, true);
2285 }
2286
2287 /* Worker to set nothrow flag. */
2288
2289 static bool
2290 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2291 {
2292 struct cgraph_edge *e;
2293
2294 TREE_NOTHROW (node->decl) = data != NULL;
2295
2296 if (data != NULL)
2297 for (e = node->callers; e; e = e->next_caller)
2298 e->can_throw_external = false;
2299 return false;
2300 }
2301
2302 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
2303 if any to NOTHROW. */
2304
2305 void
2306 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2307 {
2308 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2309 (void *)(size_t)nothrow, false);
2310 }
2311
2312 /* Worker to set const flag. */
2313
2314 static bool
2315 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2316 {
2317 /* Static constructors and destructors without a side effect can be
2318 optimized out. */
2319 if (data && !((size_t)data & 2))
2320 {
2321 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2322 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2323 if (DECL_STATIC_DESTRUCTOR (node->decl))
2324 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2325 }
2326 TREE_READONLY (node->decl) = data != NULL;
2327 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2328 return false;
2329 }
2330
2331 /* Set TREE_READONLY on NODE's decl and on aliases of NODE
2332 if any to READONLY. */
2333
2334 void
2335 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2336 {
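/* Pack READONLY into bit 0 and LOOPING into bit 1 of the DATA pointer
   handed to the worker.  */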
2337 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2338 (void *)(size_t)(readonly + (int)looping * 2),
2339 false);
2340 }
2341
2342 /* Worker to set pure flag. */
2343
2344 static bool
2345 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2346 {
2347 /* Static constructors and destructors without a side effect can be
2348 optimized out. */
2349 if (data && !((size_t)data & 2))
2350 {
2351 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2352 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2353 if (DECL_STATIC_DESTRUCTOR (node->decl))
2354 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2355 }
2356 DECL_PURE_P (node->decl) = data != NULL;
2357 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2358 return false;
2359 }
2360
2361 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE
2362 if any to PURE. */
2363
2364 void
2365 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2366 {
2367 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2368 (void *)(size_t)(pure + (int)looping * 2),
2369 false);
2370 }
2371
2372 /* Return true when NODE can not return or throw and thus
2373 it is safe to ignore its side effects for IPA analysis. */
2374
2375 bool
2376 cgraph_node_cannot_return (struct cgraph_node *node)
2377 {
2378 int flags = flags_from_decl_or_type (node->decl);
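/* Without exception handling, a noreturn function cannot return; with
   exceptions enabled it must also be nothrow, since throwing is another
   way for control to leave the call.  */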
2379 if (!flag_exceptions)
2380 return (flags & ECF_NORETURN) != 0;
2381 else
2382 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2383 == (ECF_NORETURN | ECF_NOTHROW));
2384 }
2385
2386 /* Return true when a call on edge E cannot lead to a return from the caller
2387 and thus it is safe to ignore its side effects for IPA analysis
2388 when computing side effects of the caller.
2389 FIXME: We could actually mark all edges that have no reaching
2390 path to the exit block or throw to get better results. */
2391 bool
2392 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2393 {
2394 if (cgraph_node_cannot_return (e->caller))
2395 return true;
2396 if (e->indirect_unknown_callee)
2397 {
2398 int flags = e->indirect_info->ecf_flags;
2399 if (!flag_exceptions)
2400 return (flags & ECF_NORETURN) != 0;
2401 else
2402 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2403 == (ECF_NORETURN | ECF_NOTHROW));
2404 }
2405 else
2406 return cgraph_node_cannot_return (e->callee);
2407 }
2408
2409 /* Return true when function NODE can be removed from callgraph
2410 if all direct calls are eliminated. */
2411
2412 bool
2413 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2414 {
2415 gcc_assert (!node->global.inlined_to);
2416 /* Extern inlines can always go, we will use the external definition. */
2417 if (DECL_EXTERNAL (node->decl))
2418 return true;
2419 /* When function is needed, we can not remove it. */
2420 if (node->force_output || node->used_from_other_partition)
2421 return false;
2422 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2423 || DECL_STATIC_DESTRUCTOR (node->decl))
2424 return false;
2425 /* Only COMDAT functions can be removed if externally visible. */
2426 if (node->externally_visible
2427 && (!DECL_COMDAT (node->decl)
2428 || node->forced_by_abi
2429 || symtab_used_from_object_file_p (node)))
2430 return false;
2431 return true;
2432 }
2433
2434 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2435
2436 static bool
2437 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2438 {
2439 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2440 }
2441
2442 /* Return true when function NODE and its aliases can be removed from callgraph
2443 if all direct calls are eliminated. */
2444
2445 bool
2446 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2447 {
2448 /* Extern inlines can always go, we will use the external definition. */
2449 if (DECL_EXTERNAL (node->decl))
2450 return true;
2451 if (node->address_taken)
2452 return false;
2453 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2454 }
2455
2456 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2457
2458 static bool
2459 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2460 {
2461 return symtab_used_from_object_file_p (node);
2462 }
2463
2464 /* Return true when function NODE can be expected to be removed
2465 from program when direct calls in this compilation unit are removed.
2466
2467 As a special case COMDAT functions are
2468 cgraph_can_remove_if_no_direct_calls_p while they are not
2469 cgraph_only_called_directly_p (it is possible they are called from another
2470 unit).
2471 
2472 This function behaves as cgraph_only_called_directly_p because eliminating
2473 all uses of a COMDAT function does not necessarily make it disappear from
2474 the program unless we are compiling the whole program or doing LTO. In this
2475 case we know we win since dynamic linking will not really discard the
2476 linkonce section. */
2477
2478 bool
2479 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2480 {
2481 gcc_assert (!node->global.inlined_to);
2482 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2483 return false;
2484 if (!in_lto_p && !flag_whole_program)
2485 return cgraph_only_called_directly_p (node);
2486 else
2487 {
2488 if (DECL_EXTERNAL (node->decl))
2489 return true;
2490 return cgraph_can_remove_if_no_direct_calls_p (node);
2491 }
2492 }
2493
2494
2495 /* Worker for cgraph_only_called_directly_p. */
2496
2497 static bool
2498 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2499 {
2500 return !cgraph_only_called_directly_or_aliased_p (node);
2501 }
2502
2503 /* Return true when function NODE and all its aliases are only called
2504 directly.
2505 i.e. it is not externally visible, its address was not taken and
2506 it is not used in any other non-standard way. */
2507
2508 bool
2509 cgraph_only_called_directly_p (struct cgraph_node *node)
2510 {
2511 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
2512 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
2513 NULL, true);
2514 }
2515
2516
2517 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2518
2519 static bool
2520 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
2521 {
2522 vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
2523 struct cgraph_edge *cs;
2524 enum availability avail;
2525 cgraph_function_or_thunk_node (node, &avail);
2526
2527 if (avail > AVAIL_OVERWRITABLE)
2528 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2529 if (!cs->indirect_inlining_edge)
2530 redirect_callers->safe_push (cs);
2531 return false;
2532 }
2533
2534 /* Collect all callers of NODE and its aliases that are known to lead to NODE
2535 (i.e. are not overwritable). */
2536
2537 vec<cgraph_edge_p>
2538 collect_callers_of_node (struct cgraph_node *node)
2539 {
2540 vec<cgraph_edge_p> redirect_callers = vNULL;
2541 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
2542 &redirect_callers, false);
2543 return redirect_callers;
2544 }
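
/* Example use (an illustrative sketch, not lifted from a particular caller):
   the returned vector owns heap storage and should be released once the
   edges are no longer needed, e.g.

     vec<cgraph_edge_p> callers = collect_callers_of_node (node);
     ... inspect or redirect the collected edges ...
     callers.release ();  */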
2545
2546 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it. */
2547
2548 static bool
2549 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
2550 {
2551 bool skipped_thunk = false;
2552 node = cgraph_function_or_thunk_node (node, NULL);
2553 node2 = cgraph_function_or_thunk_node (node2, NULL);
2554
2555 /* There are no virtual clones of thunks so check former_clone_of or if we
2556 might have skipped thunks because these adjustments are no longer
2557 necessary. */
2558 while (node->thunk.thunk_p)
2559 {
2560 if (node2->former_clone_of == node->decl)
2561 return true;
2562 if (!node->thunk.this_adjusting)
2563 return false;
2564 node = cgraph_function_or_thunk_node (node->callees->callee, NULL);
2565 skipped_thunk = true;
2566 }
2567
2568 if (skipped_thunk)
2569 {
2570 if (!node2->clone.args_to_skip
2571 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
2572 return false;
2573 if (node2->former_clone_of == node->decl)
2574 return true;
2575 else if (!node2->clone_of)
2576 return false;
2577 }
2578
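/* Finally walk up NODE2's clone_of chain; NODE2 is (transitively) a clone
   of NODE exactly when the walk reaches NODE.  */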
2579 while (node != node2 && node2)
2580 node2 = node2->clone_of;
2581 return node2 != NULL;
2582 }
2583
2584 /* Verify edge E count and frequency. */
2585
2586 static bool
2587 verify_edge_count_and_frequency (struct cgraph_edge *e)
2588 {
2589 bool error_found = false;
2590 if (e->count < 0)
2591 {
2592 error ("caller edge count is negative");
2593 error_found = true;
2594 }
2595 if (e->frequency < 0)
2596 {
2597 error ("caller edge frequency is negative");
2598 error_found = true;
2599 }
2600 if (e->frequency > CGRAPH_FREQ_MAX)
2601 {
2602 error ("caller edge frequency is too large");
2603 error_found = true;
2604 }
2605 if (gimple_has_body_p (e->caller->decl)
2606 && !e->caller->global.inlined_to
2607 && !e->speculative
2608 /* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
2609 Remove this once edges are actually removed from the function at that time. */
2610 && (e->frequency
2611 || (inline_edge_summary_vec.exists ()
2612 && ((inline_edge_summary_vec.length () <= (unsigned) e->uid)
2613 || !inline_edge_summary (e)->predicate)))
2614 && (e->frequency
2615 != compute_call_stmt_bb_frequency (e->caller->decl,
2616 gimple_bb (e->call_stmt))))
2617 {
2618 error ("caller edge frequency %i does not match BB frequency %i",
2619 e->frequency,
2620 compute_call_stmt_bb_frequency (e->caller->decl,
2621 gimple_bb (e->call_stmt)));
2622 error_found = true;
2623 }
2624 return error_found;
2625 }
2626
2627 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
2628 static void
2629 cgraph_debug_gimple_stmt (struct function *this_cfun, gimple stmt)
2630 {
2631 bool fndecl_was_null = false;
2632 /* debug_gimple_stmt needs correct cfun */
2633 if (cfun != this_cfun)
2634 set_cfun (this_cfun);
2635 /* ...and an actual current_function_decl */
2636 if (!current_function_decl)
2637 {
2638 current_function_decl = this_cfun->decl;
2639 fndecl_was_null = true;
2640 }
2641 debug_gimple_stmt (stmt);
2642 if (fndecl_was_null)
2643 current_function_decl = NULL;
2644 }
2645
2646 /* Verify that call graph edge E corresponds to DECL from the associated
2647 statement. Return true if the verification should fail. */
2648
2649 static bool
2650 verify_edge_corresponds_to_fndecl (struct cgraph_edge *e, tree decl)
2651 {
2652 struct cgraph_node *node;
2653
2654 if (!decl || e->callee->global.inlined_to)
2655 return false;
2656 if (cgraph_state == CGRAPH_LTO_STREAMING)
2657 return false;
2658 node = cgraph_get_node (decl);
2659
2660 /* We do not know if a node from a different partition is an alias or what it
2661 aliases and therefore cannot do the former_clone_of check reliably. When
2662 body_removed is set, we have lost all information about what it was an alias
2663 or thunk of and also cannot proceed. */
2664 if (!node
2665 || node->body_removed
2666 || node->in_other_partition
2667 || e->callee->in_other_partition)
2668 return false;
2669
2670 /* Optimizers can redirect unreachable calls or calls triggering undefined
2671 behaviour to builtin_unreachable. */
2672 if (DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
2673 && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
2674 return false;
2675 node = cgraph_function_or_thunk_node (node, NULL);
2676
2677 if (e->callee->former_clone_of != node->decl
2678 && (node != cgraph_function_or_thunk_node (e->callee, NULL))
2679 && !clone_of_p (node, e->callee))
2680 return true;
2681 else
2682 return false;
2683 }
2684
2685 /* Verify consistency of cgraph node NODE. */
2686 DEBUG_FUNCTION void
2687 verify_cgraph_node (struct cgraph_node *node)
2688 {
2689 struct cgraph_edge *e;
2690 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
2691 basic_block this_block;
2692 gimple_stmt_iterator gsi;
2693 bool error_found = false;
2694
2695 if (seen_error ())
2696 return;
2697
2698 timevar_push (TV_CGRAPH_VERIFY);
2699 error_found |= verify_symtab_base (node);
2700 for (e = node->callees; e; e = e->next_callee)
2701 if (e->aux)
2702 {
2703 error ("aux field set for edge %s->%s",
2704 identifier_to_locale (e->caller->name ()),
2705 identifier_to_locale (e->callee->name ()));
2706 error_found = true;
2707 }
2708 if (node->count < 0)
2709 {
2710 error ("execution count is negative");
2711 error_found = true;
2712 }
2713 if (node->global.inlined_to && node->same_comdat_group)
2714 {
2715 error ("inline clone in same comdat group list");
2716 error_found = true;
2717 }
2718 if (!node->definition && !node->in_other_partition && node->local.local)
2719 {
2720 error ("local symbols must be defined");
2721 error_found = true;
2722 }
2723 if (node->global.inlined_to && node->externally_visible)
2724 {
2725 error ("externally visible inline clone");
2726 error_found = true;
2727 }
2728 if (node->global.inlined_to && node->address_taken)
2729 {
2730 error ("inline clone with address taken");
2731 error_found = true;
2732 }
2733 if (node->global.inlined_to && node->force_output)
2734 {
2735 error ("inline clone is forced to output");
2736 error_found = true;
2737 }
2738 for (e = node->indirect_calls; e; e = e->next_callee)
2739 {
2740 if (e->aux)
2741 {
2742 error ("aux field set for indirect edge from %s",
2743 identifier_to_locale (e->caller->name ()));
2744 error_found = true;
2745 }
2746 if (!e->indirect_unknown_callee
2747 || !e->indirect_info)
2748 {
2749 error ("An indirect edge from %s is not marked as indirect or has no "
2750 "associated indirect_info, the corresponding statement is: ",
2751 identifier_to_locale (e->caller->name ()));
2752 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2753 error_found = true;
2754 }
2755 }
2756 bool check_comdat = symtab_comdat_local_p (node);
2757 for (e = node->callers; e; e = e->next_caller)
2758 {
2759 if (verify_edge_count_and_frequency (e))
2760 error_found = true;
2761 if (check_comdat
2762 && !symtab_in_same_comdat_p (e->caller, node))
2763 {
2764 error ("comdat-local function called by %s outside its comdat",
2765 identifier_to_locale (e->caller->name ()));
2766 error_found = true;
2767 }
2768 if (!e->inline_failed)
2769 {
2770 if (node->global.inlined_to
2771 != (e->caller->global.inlined_to
2772 ? e->caller->global.inlined_to : e->caller))
2773 {
2774 error ("inlined_to pointer is wrong");
2775 error_found = true;
2776 }
2777 if (node->callers->next_caller)
2778 {
2779 error ("multiple inline callers");
2780 error_found = true;
2781 }
2782 }
2783 else
2784 if (node->global.inlined_to)
2785 {
2786 error ("inlined_to pointer set for noninline callers");
2787 error_found = true;
2788 }
2789 }
2790 for (e = node->indirect_calls; e; e = e->next_callee)
2791 if (verify_edge_count_and_frequency (e))
2792 error_found = true;
2793 if (!node->callers && node->global.inlined_to)
2794 {
2795 error ("inlined_to pointer is set but no predecessors found");
2796 error_found = true;
2797 }
2798 if (node->global.inlined_to == node)
2799 {
2800 error ("inlined_to pointer refers to itself");
2801 error_found = true;
2802 }
2803
2804 if (node->clone_of)
2805 {
2806 struct cgraph_node *n;
2807 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
2808 if (n == node)
2809 break;
2810 if (!n)
2811 {
2812 error ("node has wrong clone_of");
2813 error_found = true;
2814 }
2815 }
2816 if (node->clones)
2817 {
2818 struct cgraph_node *n;
2819 for (n = node->clones; n; n = n->next_sibling_clone)
2820 if (n->clone_of != node)
2821 break;
2822 if (n)
2823 {
2824 error ("node has wrong clone list");
2825 error_found = true;
2826 }
2827 }
2828 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
2829 {
2830 error ("node is in clone list but it is not clone");
2831 error_found = true;
2832 }
2833 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
2834 {
2835 error ("node has wrong prev_clone pointer");
2836 error_found = true;
2837 }
2838 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
2839 {
2840 error ("double linked list of clones corrupted");
2841 error_found = true;
2842 }
2843
2844 if (node->analyzed && node->alias)
2845 {
2846 bool ref_found = false;
2847 int i;
2848 struct ipa_ref *ref = NULL;
2849
2850 if (node->callees)
2851 {
2852 error ("Alias has call edges");
2853 error_found = true;
2854 }
2855 for (i = 0; node->iterate_reference (i, ref); i++)
2856 if (ref->use != IPA_REF_ALIAS)
2857 {
2858 error ("Alias has non-alias reference");
2859 error_found = true;
2860 }
2861 else if (ref_found)
2862 {
2863 error ("Alias has more than one alias reference");
2864 error_found = true;
2865 }
2866 else
2867 ref_found = true;
2868 if (!ref_found)
2869 {
2870 error ("Analyzed alias has no reference");
2871 error_found = true;
2872 }
2873 }
2874 if (node->analyzed && node->thunk.thunk_p)
2875 {
2876 if (!node->callees)
2877 {
2878 error ("No edge out of thunk node");
2879 error_found = true;
2880 }
2881 else if (node->callees->next_callee)
2882 {
2883 error ("More than one edge out of thunk node");
2884 error_found = true;
2885 }
2886 if (gimple_has_body_p (node->decl))
2887 {
2888 error ("Thunk is not supposed to have body");
2889 error_found = true;
2890 }
2891 }
2892 else if (node->analyzed && gimple_has_body_p (node->decl)
2893 && !TREE_ASM_WRITTEN (node->decl)
2894 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
2895 && !flag_wpa)
2896 {
2897 if (this_cfun->cfg)
2898 {
2899 pointer_set_t *stmts = pointer_set_create ();
2900 int i;
2901 struct ipa_ref *ref = NULL;
2902
2903 /* Reach the trees by walking over the CFG, and note the
2904 enclosing basic-blocks in the call edges. */
2905 FOR_EACH_BB_FN (this_block, this_cfun)
2906 {
2907 for (gsi = gsi_start_phis (this_block);
2908 !gsi_end_p (gsi); gsi_next (&gsi))
2909 pointer_set_insert (stmts, gsi_stmt (gsi));
2910 for (gsi = gsi_start_bb (this_block);
2911 !gsi_end_p (gsi);
2912 gsi_next (&gsi))
2913 {
2914 gimple stmt = gsi_stmt (gsi);
2915 pointer_set_insert (stmts, stmt);
2916 if (is_gimple_call (stmt))
2917 {
2918 struct cgraph_edge *e = cgraph_edge (node, stmt);
2919 tree decl = gimple_call_fndecl (stmt);
2920 if (e)
2921 {
2922 if (e->aux)
2923 {
2924 error ("shared call_stmt:");
2925 cgraph_debug_gimple_stmt (this_cfun, stmt);
2926 error_found = true;
2927 }
2928 if (!e->indirect_unknown_callee)
2929 {
2930 if (verify_edge_corresponds_to_fndecl (e, decl))
2931 {
2932 error ("edge points to wrong declaration:");
2933 debug_tree (e->callee->decl);
2934 fprintf (stderr," Instead of:");
2935 debug_tree (decl);
2936 error_found = true;
2937 }
2938 }
2939 else if (decl)
2940 {
2941 error ("an indirect edge with unknown callee "
2942 "corresponding to a call_stmt with "
2943 "a known declaration:");
2944 error_found = true;
2945 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2946 }
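/* Mark the edge as matched to a call statement; edges left unmarked are
   reported below and the marks are cleared again there.  */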
2947 e->aux = (void *)1;
2948 }
2949 else if (decl)
2950 {
2951 error ("missing callgraph edge for call stmt:");
2952 cgraph_debug_gimple_stmt (this_cfun, stmt);
2953 error_found = true;
2954 }
2955 }
2956 }
2957 }
2958 for (i = 0;
2959 node->iterate_reference (i, ref); i++)
2960 if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
2961 {
2962 error ("reference to dead statement");
2963 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
2964 error_found = true;
2965 }
2966 pointer_set_destroy (stmts);
2967 }
2968 else
2969 /* No CFG available?! */
2970 gcc_unreachable ();
2971
2972 for (e = node->callees; e; e = e->next_callee)
2973 {
2974 if (!e->aux)
2975 {
2976 error ("edge %s->%s has no corresponding call_stmt",
2977 identifier_to_locale (e->caller->name ()),
2978 identifier_to_locale (e->callee->name ()));
2979 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2980 error_found = true;
2981 }
2982 e->aux = 0;
2983 }
2984 for (e = node->indirect_calls; e; e = e->next_callee)
2985 {
2986 if (!e->aux && !e->speculative)
2987 {
2988 error ("an indirect edge from %s has no corresponding call_stmt",
2989 identifier_to_locale (e->caller->name ()));
2990 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
2991 error_found = true;
2992 }
2993 e->aux = 0;
2994 }
2995 }
2996 if (error_found)
2997 {
2998 dump_cgraph_node (stderr, node);
2999 internal_error ("verify_cgraph_node failed");
3000 }
3001 timevar_pop (TV_CGRAPH_VERIFY);
3002 }
3003
3004 /* Verify whole cgraph structure. */
3005 DEBUG_FUNCTION void
3006 verify_cgraph (void)
3007 {
3008 struct cgraph_node *node;
3009
3010 if (seen_error ())
3011 return;
3012
3013 FOR_EACH_FUNCTION (node)
3014 verify_cgraph_node (node);
3015 }
3016
3017 /* Given NODE, walk the alias chain to return the function NODE is an alias of.
3018 Walk through thunks, too.
3019 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
3020
3021 struct cgraph_node *
3022 cgraph_function_node (struct cgraph_node *node, enum availability *availability)
3023 {
3024 do
3025 {
3026 node = cgraph_function_or_thunk_node (node, availability);
3027 if (node->thunk.thunk_p)
3028 {
3029 node = node->callees->callee;
3030 if (availability)
3031 {
3032 enum availability a;
3033 a = cgraph_function_body_availability (node);
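/* The availability of the whole chain is the weakest availability seen
   along it.  */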
3034 if (a < *availability)
3035 *availability = a;
3036 }
3037 node = cgraph_function_or_thunk_node (node, availability);
3038 }
3039 } while (node && node->thunk.thunk_p);
3040 return node;
3041 }
3042
3043 /* When doing LTO, read NODE's body from disk if it is not already present. */
3044
3045 bool
3046 cgraph_get_body (struct cgraph_node *node)
3047 {
3048 struct lto_file_decl_data *file_data;
3049 const char *data, *name;
3050 size_t len;
3051 tree decl = node->decl;
3052
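/* A non-NULL DECL_RESULT means the body is already available, so there is
   nothing to read in.  */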
3053 if (DECL_RESULT (decl))
3054 return false;
3055
3056 gcc_assert (in_lto_p);
3057
3058 file_data = node->lto_file_data;
3059 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3060
3061 /* We may have renamed the declaration, e.g., a static function. */
3062 name = lto_get_decl_name_mapping (file_data, name);
3063
3064 data = lto_get_section_data (file_data, LTO_section_function_body,
3065 name, &len);
3066 if (!data)
3067 {
3068 dump_cgraph_node (stderr, node);
3069 fatal_error ("%s: section %s is missing",
3070 file_data->file_name,
3071 name);
3072 }
3073
3074 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3075
3076 lto_input_function_body (file_data, node, data);
3077 lto_stats.num_function_bodies++;
3078 lto_free_section_data (file_data, LTO_section_function_body, name,
3079 data, len);
3080 lto_free_function_in_decl_state_for_node (node);
3081 return true;
3082 }
3083
3084 /* Verify that the types of the arguments of STMT match those of the function
3085 declaration FNDECL. If we cannot verify this or there is a mismatch,
3086 return false. */
3087
3088 static bool
3089 gimple_check_call_args (gimple stmt, tree fndecl, bool args_count_match)
3090 {
3091 tree parms, p;
3092 unsigned int i, nargs;
3093
3094 /* Calls to internal functions always match their signature. */
3095 if (gimple_call_internal_p (stmt))
3096 return true;
3097
3098 nargs = gimple_call_num_args (stmt);
3099
3100 /* Get argument types for verification. */
3101 if (fndecl)
3102 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3103 else
3104 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3105
3106 /* Verify if the type of the argument matches that of the function
3107 declaration. If we cannot verify this or there is a mismatch,
3108 return false. */
3109 if (fndecl && DECL_ARGUMENTS (fndecl))
3110 {
3111 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3112 i < nargs;
3113 i++, p = DECL_CHAIN (p))
3114 {
3115 tree arg;
3116 /* We cannot distinguish a varargs function from the case
3117 of excess parameters; still, deferring the inlining decision
3118 to the callee is possible. */
3119 if (!p)
3120 break;
3121 arg = gimple_call_arg (stmt, i);
3122 if (p == error_mark_node
3123 || DECL_ARG_TYPE (p) == error_mark_node
3124 || arg == error_mark_node
3125 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3126 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3127 return false;
3128 }
3129 if (args_count_match && p)
3130 return false;
3131 }
3132 else if (parms)
3133 {
3134 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3135 {
3136 tree arg;
3137 /* If this is a varargs function, defer the inlining decision
3138 to the callee. */
3139 if (!p)
3140 break;
3141 arg = gimple_call_arg (stmt, i);
3142 if (TREE_VALUE (p) == error_mark_node
3143 || arg == error_mark_node
3144 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3145 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3146 && !fold_convertible_p (TREE_VALUE (p), arg)))
3147 return false;
3148 }
3149 }
3150 else
3151 {
3152 if (nargs != 0)
3153 return false;
3154 }
3155 return true;
3156 }
3157
3158 /* Verify if the type of the argument and lhs of CALL_STMT matches
3159 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3160 true, the arg count needs to be the same.
3161 If we cannot verify this or there is a mismatch, return false. */
3162
3163 bool
3164 gimple_check_call_matching_types (gimple call_stmt, tree callee,
3165 bool args_count_match)
3166 {
3167 tree lhs;
3168
3169 if ((DECL_RESULT (callee)
3170 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3171 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3172 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3173 TREE_TYPE (lhs))
3174 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3175 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3176 return false;
3177 return true;
3178 }
3179
3180 #include "gt-cgraph.h"