s390.c (s390_function_value): Rename to ...
[gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains basic routines manipulating call graph
23
24 The callgraph:
25
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
29
   The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
32
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
46 added there.
47
48 Interprocedural information:
49
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
74
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101
/* Printable names for linker-plugin symbol resolution kinds, indexed by
   the corresponding enum ld_plugin_symbol_resolution value (the first
   entry corresponds to "unknown"/unset resolution).  */
const char * const ld_plugin_symbol_resolution_names[]=
{
  "",
  "undef",
  "prevailing_def",
  "prevailing_def_ironly",
  "preempted_reg",
  "preempted_ir",
  "resolved_ir",
  "resolved_exec",
  "resolved_dyn"
};
114
115 static void cgraph_node_remove_callers (struct cgraph_node *node);
116 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
117 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
118
/* Hash table used to convert declarations into nodes; keyed by DECL_UID.
   Created lazily by cgraph_node.  */
static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
/* Hash table used to convert assembler names into nodes.  Built lazily by
   cgraph_node_for_asm and kept up to date by cgraph_node afterwards.  */
static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;

/* The linked list of cgraph nodes.  New nodes are pushed at the head.  */
struct cgraph_node *cgraph_nodes;

/* Queue of cgraph nodes scheduled to be lowered.  */
struct cgraph_node *cgraph_nodes_queue;

/* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
   secondary queue used during optimization to accommodate passes that
   may generate new functions that need to be optimized and expanded.  */
struct cgraph_node *cgraph_new_nodes;

/* Number of nodes in existence.  */
int cgraph_n_nodes;

/* Maximal uid used in cgraph nodes.  */
int cgraph_max_uid;

/* Maximal uid used in cgraph edges.  */
int cgraph_edge_max_uid;

/* Maximal pid used for profiling.  */
int cgraph_max_pid;

/* Set when whole unit has been analyzed so we can access global info.  */
bool cgraph_global_info_ready = false;

/* What state callgraph is in right now.  */
enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;

/* Set when the cgraph is fully built and the basic flags are computed.  */
bool cgraph_function_flags_ready = false;

/* Linked list of cgraph asm nodes.  */
struct cgraph_asm_node *cgraph_asm_nodes;

/* Last node in cgraph_asm_nodes.  */
static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;

/* The order index of the next cgraph node to be created.  This is
   used so that we can sort the cgraph nodes in order by when we saw
   them, to support -fno-toplevel-reorder.  */
int cgraph_order;
166
/* List of hooks triggered on cgraph_edge events.  Each cell carries the
   callback, its opaque client DATA, and the next registered hook.  */
struct cgraph_edge_hook_list {
  cgraph_edge_hook hook;
  void *data;
  struct cgraph_edge_hook_list *next;
};

/* List of hooks triggered on cgraph_node events.  */
struct cgraph_node_hook_list {
  cgraph_node_hook hook;
  void *data;
  struct cgraph_node_hook_list *next;
};

/* List of hooks triggered on events involving two cgraph_edges.  */
struct cgraph_2edge_hook_list {
  cgraph_2edge_hook hook;
  void *data;
  struct cgraph_2edge_hook_list *next;
};

/* List of hooks triggered on events involving two cgraph_nodes.  */
struct cgraph_2node_hook_list {
  cgraph_2node_hook hook;
  void *data;
  struct cgraph_2node_hook_list *next;
};

/* List of hooks triggered when an edge is removed.  */
struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
/* List of hooks triggered when a node is removed.  */
struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
/* List of hooks triggered when an edge is duplicated.  */
struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
/* List of hooks triggered when a node is duplicated.  */
struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
/* List of hooks triggered when a function is inserted.  */
struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;

/* Head of a linked list of unused (freed) call graph nodes.
   Do not GTY((delete)) this list so UIDs get reliably recycled.  */
static GTY(()) struct cgraph_node *free_nodes;
/* Head of a linked list of unused (freed) call graph edges.
   Do not GTY((delete)) this list so UIDs get reliably recycled.  */
static GTY(()) struct cgraph_edge *free_edges;

/* Macros to access the next item in the list of free cgraph nodes and
   edges.  NEXT_FREE_EDGE reuses the prev_caller field, which carries no
   meaning while an edge sits on the free list.  */
#define NEXT_FREE_NODE(NODE) (NODE)->next
#define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
217
218 /* Register HOOK to be called with DATA on each removed edge. */
219 struct cgraph_edge_hook_list *
220 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
221 {
222 struct cgraph_edge_hook_list *entry;
223 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
224
225 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
226 entry->hook = hook;
227 entry->data = data;
228 entry->next = NULL;
229 while (*ptr)
230 ptr = &(*ptr)->next;
231 *ptr = entry;
232 return entry;
233 }
234
235 /* Remove ENTRY from the list of hooks called on removing edges. */
236 void
237 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
238 {
239 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
240
241 while (*ptr != entry)
242 ptr = &(*ptr)->next;
243 *ptr = entry->next;
244 free (entry);
245 }
246
247 /* Call all edge removal hooks. */
248 static void
249 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
250 {
251 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
252 while (entry)
253 {
254 entry->hook (e, entry->data);
255 entry = entry->next;
256 }
257 }
258
259 /* Register HOOK to be called with DATA on each removed node. */
260 struct cgraph_node_hook_list *
261 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
262 {
263 struct cgraph_node_hook_list *entry;
264 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
265
266 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
267 entry->hook = hook;
268 entry->data = data;
269 entry->next = NULL;
270 while (*ptr)
271 ptr = &(*ptr)->next;
272 *ptr = entry;
273 return entry;
274 }
275
276 /* Remove ENTRY from the list of hooks called on removing nodes. */
277 void
278 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
279 {
280 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
281
282 while (*ptr != entry)
283 ptr = &(*ptr)->next;
284 *ptr = entry->next;
285 free (entry);
286 }
287
288 /* Call all node removal hooks. */
289 static void
290 cgraph_call_node_removal_hooks (struct cgraph_node *node)
291 {
292 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
293 while (entry)
294 {
295 entry->hook (node, entry->data);
296 entry = entry->next;
297 }
298 }
299
300 /* Register HOOK to be called with DATA on each inserted node. */
301 struct cgraph_node_hook_list *
302 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
303 {
304 struct cgraph_node_hook_list *entry;
305 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
306
307 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
308 entry->hook = hook;
309 entry->data = data;
310 entry->next = NULL;
311 while (*ptr)
312 ptr = &(*ptr)->next;
313 *ptr = entry;
314 return entry;
315 }
316
317 /* Remove ENTRY from the list of hooks called on inserted nodes. */
318 void
319 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
320 {
321 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
322
323 while (*ptr != entry)
324 ptr = &(*ptr)->next;
325 *ptr = entry->next;
326 free (entry);
327 }
328
329 /* Call all node insertion hooks. */
330 void
331 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
332 {
333 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
334 while (entry)
335 {
336 entry->hook (node, entry->data);
337 entry = entry->next;
338 }
339 }
340
341 /* Register HOOK to be called with DATA on each duplicated edge. */
342 struct cgraph_2edge_hook_list *
343 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
344 {
345 struct cgraph_2edge_hook_list *entry;
346 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
347
348 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
349 entry->hook = hook;
350 entry->data = data;
351 entry->next = NULL;
352 while (*ptr)
353 ptr = &(*ptr)->next;
354 *ptr = entry;
355 return entry;
356 }
357
358 /* Remove ENTRY from the list of hooks called on duplicating edges. */
359 void
360 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
361 {
362 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
363
364 while (*ptr != entry)
365 ptr = &(*ptr)->next;
366 *ptr = entry->next;
367 free (entry);
368 }
369
370 /* Call all edge duplication hooks. */
371 static void
372 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
373 struct cgraph_edge *cs2)
374 {
375 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
376 while (entry)
377 {
378 entry->hook (cs1, cs2, entry->data);
379 entry = entry->next;
380 }
381 }
382
383 /* Register HOOK to be called with DATA on each duplicated node. */
384 struct cgraph_2node_hook_list *
385 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
386 {
387 struct cgraph_2node_hook_list *entry;
388 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
389
390 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
391 entry->hook = hook;
392 entry->data = data;
393 entry->next = NULL;
394 while (*ptr)
395 ptr = &(*ptr)->next;
396 *ptr = entry;
397 return entry;
398 }
399
400 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
401 void
402 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
403 {
404 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
405
406 while (*ptr != entry)
407 ptr = &(*ptr)->next;
408 *ptr = entry->next;
409 free (entry);
410 }
411
412 /* Call all node duplication hooks. */
413 static void
414 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
415 struct cgraph_node *node2)
416 {
417 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
418 while (entry)
419 {
420 entry->hook (node1, node2, entry->data);
421 entry = entry->next;
422 }
423 }
424
/* Returns a hash code for P, a cgraph_node; hashes on the DECL_UID of
   the node's declaration (the key used by cgraph_hash).  */

static hashval_t
hash_node (const void *p)
{
  const struct cgraph_node *n = (const struct cgraph_node *) p;
  return (hashval_t) DECL_UID (n->decl);
}
433
434
/* Returns nonzero if P1 and P2, both cgraph_nodes, refer to declarations
   with the same DECL_UID.  Equality callback for cgraph_hash.  */

static int
eq_node (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
  const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
444
/* Allocate new callgraph node.  Take one from the free list when
   available (recycling its uid); otherwise allocate a fresh zeroed node
   from GC memory and assign it the next uid.  The node is NOT linked
   into any list; see cgraph_create_node for that.  */

static inline struct cgraph_node *
cgraph_allocate_node (void)
{
  struct cgraph_node *node;

  if (free_nodes)
    {
      node = free_nodes;
      free_nodes = NEXT_FREE_NODE (node);
    }
  else
    {
      node = ggc_alloc_cleared_cgraph_node ();
      node->uid = cgraph_max_uid++;
    }

  return node;
}
465
/* Allocate new callgraph node and insert it into basic data structures:
   push it at the head of the cgraph_nodes list, give it an order index
   and default flags, and bump the node count.  The decl field is left
   for the caller to fill in.  */

static struct cgraph_node *
cgraph_create_node (void)
{
  struct cgraph_node *node = cgraph_allocate_node ();

  node->next = cgraph_nodes;
  node->pid = -1;
  node->order = cgraph_order++;
  if (cgraph_nodes)
    cgraph_nodes->previous = node;
  node->previous = NULL;
  /* INT_MIN marks the growth estimate as not yet computed.  */
  node->global.estimated_growth = INT_MIN;
  node->frequency = NODE_FREQUENCY_NORMAL;
  node->count_materialization_scale = REG_BR_PROB_BASE;
  ipa_empty_ref_list (&node->ref_list);
  cgraph_nodes = node;
  cgraph_n_nodes++;
  return node;
}
487
/* Return cgraph node assigned to DECL.  Create new one when needed.
   For a same-body alias, transparently return the node of the function
   the alias refers to.  Newly created nodes of nested functions are
   linked into their origin's nested list, and the assembler name hash
   (if it was already built) is kept up to date.  */

struct cgraph_node *
cgraph_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  /* The DECL_UID-keyed hash table is created lazily on first use.  */
  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  if (*slot)
    {
      node = *slot;
      /* Resolve same-body aliases to the node of the real function.  */
      if (node->same_body_alias)
	node = node->same_body;
      return node;
    }

  node = cgraph_create_node ();
  node->decl = decl;
  *slot = node;
  /* Record the nesting structure for functions defined inside another
     function.  */
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
    }
  if (assembler_name_hash)
    {
      void **aslot;
      tree name = DECL_ASSEMBLER_NAME (decl);

      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
					INSERT);
      /* We can have multiple declarations with same assembler name.  For C++
	 it is __builtin_strlen and strlen, for instance.  Do we need to
	 record them all?  Original implementation marked just first one
	 so let's hope for the best.  */
      if (*aslot == NULL)
	*aslot = node;
    }
  return node;
}
538
/* Mark ALIAS as an alias to DECL.  Create a node for ALIAS flagged as a
   same-body alias and chain it onto DECL's same_body list.  Return the
   new alias node, or NULL if a cgraph node already exists for ALIAS.  */

static struct cgraph_node *
cgraph_same_body_alias_1 (tree alias, tree decl)
{
  struct cgraph_node key, *alias_node, *decl_node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
  decl_node = cgraph_node (decl);

  key.decl = alias;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  /* If the cgraph_node has been already created, fail.  */
  if (*slot)
    return NULL;

  alias_node = cgraph_allocate_node ();
  alias_node->decl = alias;
  alias_node->same_body_alias = 1;
  alias_node->same_body = decl_node;
  alias_node->previous = NULL;
  /* Push the alias at the head of DECL's same_body list.  */
  if (decl_node->same_body)
    decl_node->same_body->previous = alias_node;
  alias_node->next = decl_node->same_body;
  alias_node->thunk.alias = decl;
  decl_node->same_body = alias_node;
  *slot = alias_node;
  return alias_node;
}
571
/* Attempt to mark ALIAS as an alias to DECL.  Return alias node if successful
   and NULL otherwise.
   Same body aliases are output whenever the body of DECL is output,
   and cgraph_node (ALIAS) transparently returns cgraph_node (DECL).  */

struct cgraph_node *
cgraph_same_body_alias (tree alias, tree decl)
{
#ifndef ASM_OUTPUT_DEF
  /* If aliases aren't supported by the assembler, fail.  When this
     target macro is undefined the rest of the function is unreachable
     by design.  */
  return NULL;
#endif

  /*gcc_assert (!assembler_name_hash);*/

  return cgraph_same_body_alias_1 (alias, decl);
}
589
/* Add thunk alias into callgraph.  The alias declaration is ALIAS and it
   aliases DECL with an adjustments made into the first parameter.
   See comments in thunk_adjust for detail on the parameters.
   Any pre-existing node for ALIAS (which must be finalized and without
   same-body aliases of its own) is removed first; the thunk is then
   recorded as a same-body alias of DECL with the thunk fields filled.  */

struct cgraph_node *
cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
		  HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
		  tree virtual_offset,
		  tree real_alias)
{
  struct cgraph_node *node = cgraph_get_node (alias);

  if (node)
    {
      gcc_assert (node->local.finalized);
      gcc_assert (!node->same_body);
      cgraph_remove_node (node);
    }

  node = cgraph_same_body_alias_1 (alias, decl);
  gcc_assert (node);
  /* VIRTUAL_OFFSET, when given, must agree with VIRTUAL_VALUE.  */
  gcc_checking_assert (!virtual_offset
		       || tree_int_cst_equal (virtual_offset,
					      size_int (virtual_value)));
  node->thunk.fixed_offset = fixed_offset;
  node->thunk.this_adjusting = this_adjusting;
  node->thunk.virtual_value = virtual_value;
  node->thunk.virtual_offset_p = virtual_offset != NULL;
  node->thunk.alias = real_alias;
  node->thunk.thunk_p = true;
  return node;
}
622
/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
   is assigned.  Unlike cgraph_get_node, a same-body alias node is
   returned as-is rather than resolved to the aliased function.
   Never creates a new node.  */

struct cgraph_node *
cgraph_get_node_or_alias (const_tree decl)
{
  struct cgraph_node key, *node = NULL, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    return NULL;

  key.decl = CONST_CAST2 (tree, const_tree, decl);

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
						 NO_INSERT);

  if (slot && *slot)
    node = *slot;
  return node;
}
645
/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
   is assigned.  A same-body alias is transparently resolved to the node
   of the function it aliases.  Never creates a new node.  */

struct cgraph_node *
cgraph_get_node (const_tree decl)
{
  struct cgraph_node key, *node = NULL, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    return NULL;

  key.decl = CONST_CAST2 (tree, const_tree, decl);

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
						 NO_INSERT);

  if (slot && *slot)
    {
      node = *slot;
      if (node->same_body_alias)
	node = node->same_body;
    }
  return node;
}
672
/* Insert already constructed NODE into hashtable.  NODE->decl must not
   already have a node in the table.  NOTE(review): assumes cgraph_hash
   has already been created by an earlier cgraph_node call — confirm
   against callers.  */

void
cgraph_insert_node_to_hashtable (struct cgraph_node *node)
{
  struct cgraph_node **slot;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);

  gcc_assert (!*slot);
  *slot = node;
}
685
/* Returns a hash code for P, a cgraph_node; hashes on the node's
   assembler name.  Hash callback for assembler_name_hash.  */

static hashval_t
hash_node_by_assembler_name (const void *p)
{
  const struct cgraph_node *n = (const struct cgraph_node *) p;
  return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
}
694
/* Returns nonzero if the assembler name of cgraph_node P1 equals the
   name tree P2.  Equality callback for assembler_name_hash; note the
   asymmetric argument types (node vs. identifier).  */

static int
eq_assembler_name (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
  const_tree name = (const_tree)p2;
  return (decl_assembler_name_equal (n1->decl, name));
}
704
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
   Return NULL if there's no such node.  On first use, lazily build the
   assembler-name hash from all non-inlined nodes and their same-body
   aliases; cgraph_node keeps it updated afterwards.  */

struct cgraph_node *
cgraph_node_for_asm (tree asmname)
{
  struct cgraph_node *node;
  void **slot;

  if (!assembler_name_hash)
    {
      assembler_name_hash =
	htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
			 NULL);
      for (node = cgraph_nodes; node; node = node->next)
	if (!node->global.inlined_to)
	  {
	    tree name = DECL_ASSEMBLER_NAME (node->decl);
	    slot = htab_find_slot_with_hash (assembler_name_hash, name,
					     decl_assembler_name_hash (name),
					     INSERT);
	    /* We can have multiple declarations with same assembler name.  For
	       C++ it is __builtin_strlen and strlen, for instance.  Do we
	       need to record them all?  Original implementation marked just
	       first one so let's hope for the best.  */
	    if (!*slot)
	      *slot = node;
	    /* Also enter every same-body alias under its own name.  */
	    if (node->same_body)
	      {
		struct cgraph_node *alias;

		for (alias = node->same_body; alias; alias = alias->next)
		  {
		    hashval_t hash;
		    name = DECL_ASSEMBLER_NAME (alias->decl);
		    hash = decl_assembler_name_hash (name);
		    slot = htab_find_slot_with_hash (assembler_name_hash, name,
						     hash, INSERT);
		    if (!*slot)
		      *slot = alias;
		  }
	      }
	  }
    }

  slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
				   decl_assembler_name_hash (asmname),
				   NO_INSERT);

  if (slot)
    {
      node = (struct cgraph_node *) *slot;
      /* Resolve same-body aliases to the node of the real function.  */
      if (node->same_body_alias)
	node = node->same_body;
      return node;
    }
  return NULL;
}
763
/* Returns a hash value for the cgraph_edge X, keyed on the pointer
   identity of its call statement.  Hash callback for call_site_hash.
   (The previous comment mentioning die_struct was a copy-paste error.)  */

static hashval_t
edge_hash (const void *x)
{
  return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
}
771
/* Return nonzero if the call statement of cgraph_edge X is the gimple
   statement Y.  Equality callback for call_site_hash; note the
   asymmetric argument types (edge vs. statement).  */

static int
edge_eq (const void *x, const void *y)
{
  return ((const struct cgraph_edge *) x)->call_stmt == y;
}
779
/* Add call graph edge E to call site hash of its caller.  The caller's
   call_site_hash must already exist, and no edge for the same call
   statement may be present.  */

static inline void
cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
{
  void **slot;
  slot = htab_find_slot_with_hash (e->caller->call_site_hash,
				   e->call_stmt,
				   htab_hash_pointer (e->call_stmt),
				   INSERT);
  gcc_assert (!*slot);
  *slot = e;
}
793
/* Return the callgraph edge representing the GIMPLE_CALL statement
   CALL_STMT in NODE, or NULL if there is none.  Searches both direct
   callees and indirect calls; once a node accumulates many edges, a
   call-site hash table is built so later lookups are O(1).  */

struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, gimple call_stmt)
{
  struct cgraph_edge *e, *e2;
  int n = 0;	/* Edges scanned so far; triggers hash creation.  */

  if (node->call_site_hash)
    return (struct cgraph_edge *)
      htab_find_with_hash (node->call_site_hash, call_stmt,
			   htab_hash_pointer (call_stmt));

  /* This loop may turn out to be performance problem.  In such case adding
     hashtables into call nodes with very many edges is probably best
     solution.  It is not good idea to add pointer into CALL_EXPR itself
     because we want to make possible having multiple cgraph nodes representing
     different clones of the same body before the body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->call_stmt == call_stmt)
	break;
      n++;
    }

  if (!e)
    for (e = node->indirect_calls; e; e = e->next_callee)
      {
	if (e->call_stmt == call_stmt)
	  break;
	n++;
      }

  /* After scanning more than 100 edges, build the hash table covering
     all existing edges to speed up future lookups.  */
  if (n > 100)
    {
      node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
      for (e2 = node->callees; e2; e2 = e2->next_callee)
	cgraph_add_edge_to_call_site_hash (e2);
      for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
	cgraph_add_edge_to_call_site_hash (e2);
    }

  return e;
}
839
840
/* Change field call_stmt of edge E to NEW_STMT.  Keeps the caller's
   call-site hash consistent, turns an indirect edge direct when the new
   statement has a known callee, and recomputes whether the call can
   throw externally.  */

void
cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
{
  tree decl;

  /* The edge is keyed by its old statement; remove it before changing
     the key.  */
  if (e->caller->call_site_hash)
    {
      htab_remove_elt_with_hash (e->caller->call_site_hash,
				 e->call_stmt,
				 htab_hash_pointer (e->call_stmt));
    }

  e->call_stmt = new_stmt;
  if (e->indirect_unknown_callee
      && (decl = gimple_call_fndecl (new_stmt)))
    {
      /* Constant propagation (and possibly also inlining?) can turn an
	 indirect call into a direct one.  */
      struct cgraph_node *new_callee = cgraph_node (decl);

      cgraph_make_edge_direct (e, new_callee, 0);
    }

  /* stmt_can_throw_external needs the caller's function context.  */
  push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
  e->can_throw_external = stmt_can_throw_external (new_stmt);
  pop_cfun ();
  if (e->caller->call_site_hash)
    cgraph_add_edge_to_call_site_hash (e);
}
872
/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.  ORIG is the node owning
   OLD_STMT; each clone's edge for OLD_STMT (when present) is redirected
   to NEW_STMT.  */

void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
				       gimple old_stmt, gimple new_stmt)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt);

  /* Depth-first walk over the clone tree rooted at ORIG: descend into
     clones first, then move to siblings, climbing back through clone_of
     when a subtree is exhausted.  */
  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
	if (edge)
	  cgraph_set_call_stmt (edge, new_stmt);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
906
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   same function body.  If clones already have edge for OLD_STMT; only
   update the edge same way as cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
				     struct cgraph_node *callee,
				     gimple old_stmt,
				     gimple stmt, gcov_type count,
				     int freq, int loop_depth,
				     cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  /* Create the edge in the master only when one does not exist yet.  */
  if (!cgraph_edge (orig, stmt))
    {
      edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
      edge->inline_failed = reason;
    }

  /* Depth-first walk over the clone tree rooted at ORIG (same traversal
     as cgraph_set_call_stmt_including_clones).  */
  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);

	/* It is possible that clones already contain the edge while
	   master didn't.  Either we promoted indirect call into direct
	   call in the clone or we are processing clones of unreachable
	   master where edges has been removed.  */
	if (edge)
	  cgraph_set_call_stmt (edge, stmt);
	else if (!cgraph_edge (node, stmt))
	  {
	    edge = cgraph_create_edge (node, callee, stmt, count,
				       freq, loop_depth);
	    edge->inline_failed = reason;
	  }

	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
963
/* Give initial reasons why inlining would fail on EDGE.  This gets either
   nullified or usually overwritten by more precise reasons later.
   Reasons are tested in order of decreasing severity; the fallback
   CIF_FUNCTION_NOT_CONSIDERED simply means no decision was made yet.  */

static void
initialize_inline_failed (struct cgraph_edge *e)
{
  struct cgraph_node *callee = e->callee;

  if (e->indirect_unknown_callee)
    e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
  else if (!callee->analyzed)
    e->inline_failed = CIF_BODY_NOT_AVAILABLE;
  else if (callee->local.redefined_extern_inline)
    e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
  else if (!callee->local.inlinable)
    e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
  else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
    e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
  else
    e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
}
985
/* Allocate a cgraph_edge structure and fill it with data according to the
   parameters of which only CALLEE can be NULL (when creating an indirect call
   edge).  Edges are recycled from the free list when possible so that
   edge UIDs stay dense.  The edge is NOT linked into any caller/callee
   list; cgraph_create_edge and cgraph_create_indirect_edge do that.  */

static struct cgraph_edge *
cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
		      gimple call_stmt, gcov_type count, int freq, int nest)
{
  struct cgraph_edge *edge;

  /* LTO does not actually have access to the call_stmt since these
     have not been loaded yet.  */
  if (call_stmt)
    {
      /* This is a rather expensive check possibly triggering
	 construction of call stmt hashtable.  */
      gcc_checking_assert (!cgraph_edge (caller, call_stmt));

      gcc_assert (is_gimple_call (call_stmt));
    }

  if (free_edges)
    {
      edge = free_edges;
      free_edges = NEXT_FREE_EDGE (edge);
    }
  else
    {
      edge = ggc_alloc_cgraph_edge ();
      edge->uid = cgraph_edge_max_uid++;
    }

  edge->aux = NULL;
  edge->caller = caller;
  edge->callee = callee;
  edge->prev_caller = NULL;
  edge->next_caller = NULL;
  edge->prev_callee = NULL;
  edge->next_callee = NULL;

  edge->count = count;
  gcc_assert (count >= 0);
  edge->frequency = freq;
  gcc_assert (freq >= 0);
  gcc_assert (freq <= CGRAPH_FREQ_MAX);
  edge->loop_nest = nest;

  edge->call_stmt = call_stmt;
  /* stmt_can_throw_external needs the caller's function context.  */
  push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
  edge->can_throw_external
    = call_stmt ? stmt_can_throw_external (call_stmt) : false;
  pop_cfun ();
  edge->call_stmt_cannot_inline_p =
    (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
  if (call_stmt && caller->call_site_hash)
    cgraph_add_edge_to_call_site_hash (edge);

  edge->indirect_info = NULL;
  edge->indirect_inlining_edge = 0;

  return edge;
}
1048
/* Create edge from CALLER to CALLEE in the cgraph.  The new edge is
   pushed at the head of both the caller's callees list and the callee's
   callers list, and given an initial inline-failure reason.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
		    gimple call_stmt, gcov_type count, int freq, int nest)
{
  struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
						   count, freq, nest);

  edge->indirect_unknown_callee = 0;
  initialize_inline_failed (edge);

  edge->next_caller = callee->callers;
  if (callee->callers)
    callee->callers->prev_caller = edge;
  edge->next_callee = caller->callees;
  if (caller->callees)
    caller->callees->prev_callee = edge;
  caller->callees = edge;
  callee->callers = edge;

  return edge;
}
1072
1073 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1074
1075 struct cgraph_indirect_call_info *
1076 cgraph_allocate_init_indirect_info (void)
1077 {
1078 struct cgraph_indirect_call_info *ii;
1079
1080 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1081 ii->param_index = -1;
1082 return ii;
1083 }
1084
1085 /* Create an indirect edge with a yet-undetermined callee where the call
1086 statement destination is a formal parameter of the caller with index
1087 PARAM_INDEX. */
1088
1089 struct cgraph_edge *
1090 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1091 int ecf_flags,
1092 gcov_type count, int freq, int nest)
1093 {
1094 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1095 count, freq, nest);
1096
1097 edge->indirect_unknown_callee = 1;
1098 initialize_inline_failed (edge);
1099
1100 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1101 edge->indirect_info->ecf_flags = ecf_flags;
1102
1103 edge->next_callee = caller->indirect_calls;
1104 if (caller->indirect_calls)
1105 caller->indirect_calls->prev_callee = edge;
1106 caller->indirect_calls = edge;
1107
1108 return edge;
1109 }
1110
1111 /* Remove the edge E from the list of the callers of the callee. */
1112
1113 static inline void
1114 cgraph_edge_remove_callee (struct cgraph_edge *e)
1115 {
1116 gcc_assert (!e->indirect_unknown_callee);
1117 if (e->prev_caller)
1118 e->prev_caller->next_caller = e->next_caller;
1119 if (e->next_caller)
1120 e->next_caller->prev_caller = e->prev_caller;
1121 if (!e->prev_caller)
1122 e->callee->callers = e->next_caller;
1123 }
1124
1125 /* Remove the edge E from the list of the callees of the caller. */
1126
1127 static inline void
1128 cgraph_edge_remove_caller (struct cgraph_edge *e)
1129 {
1130 if (e->prev_callee)
1131 e->prev_callee->next_callee = e->next_callee;
1132 if (e->next_callee)
1133 e->next_callee->prev_callee = e->prev_callee;
1134 if (!e->prev_callee)
1135 {
1136 if (e->indirect_unknown_callee)
1137 e->caller->indirect_calls = e->next_callee;
1138 else
1139 e->caller->callees = e->next_callee;
1140 }
1141 if (e->caller->call_site_hash)
1142 htab_remove_elt_with_hash (e->caller->call_site_hash,
1143 e->call_stmt,
1144 htab_hash_pointer (e->call_stmt));
1145 }
1146
1147 /* Put the edge onto the free list. */
1148
1149 static void
1150 cgraph_free_edge (struct cgraph_edge *e)
1151 {
1152 int uid = e->uid;
1153
1154 /* Clear out the edge so we do not dangle pointers. */
1155 memset (e, 0, sizeof (*e));
1156 e->uid = uid;
1157 NEXT_FREE_EDGE (e) = free_edges;
1158 free_edges = e;
1159 }
1160
1161 /* Remove the edge E in the cgraph. */
1162
1163 void
1164 cgraph_remove_edge (struct cgraph_edge *e)
1165 {
1166 /* Call all edge removal hooks. */
1167 cgraph_call_edge_removal_hooks (e);
1168
1169 if (!e->indirect_unknown_callee)
1170 /* Remove from callers list of the callee. */
1171 cgraph_edge_remove_callee (e);
1172
1173 /* Remove from callees list of the callers. */
1174 cgraph_edge_remove_caller (e);
1175
1176 /* Put the edge onto the free list. */
1177 cgraph_free_edge (e);
1178 }
1179
1180 /* Set callee of call graph edge E and add it to the corresponding set of
1181 callers. */
1182
1183 static void
1184 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1185 {
1186 e->prev_caller = NULL;
1187 if (n->callers)
1188 n->callers->prev_caller = e;
1189 e->next_caller = n->callers;
1190 n->callers = e;
1191 e->callee = n;
1192 }
1193
/* Redirect callee of E to N.  The function does not update the underlying
   call expression.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Detach E from the caller list of its current callee, then attach it
     to the caller list of N.  */
  cgraph_edge_remove_callee (e);
  cgraph_set_edge_callee (e, n);
}
1206
/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
   CALLEE.  DELTA is an integer constant that is to be added to the this
   pointer (first parameter) to compensate for skipping a thunk adjustment.
   The edge is moved from the caller's indirect-call list to its ordinary
   callee list, and its inlining status is recomputed.  */

void
cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee,
			 HOST_WIDE_INT delta)
{
  edge->indirect_unknown_callee = 0;
  edge->indirect_info->thunk_delta = delta;

  /* Get the edge out of the indirect edge list.  */
  if (edge->prev_callee)
    edge->prev_callee->next_callee = edge->next_callee;
  if (edge->next_callee)
    edge->next_callee->prev_callee = edge->prev_callee;
  if (!edge->prev_callee)
    /* EDGE was the head of the indirect-call list.  */
    edge->caller->indirect_calls = edge->next_callee;

  /* Put it into the normal callee list, at the head.  */
  edge->prev_callee = NULL;
  edge->next_callee = edge->caller->callees;
  if (edge->caller->callees)
    edge->caller->callees->prev_callee = edge;
  edge->caller->callees = edge;

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (edge, callee);

  /* We need to re-determine the inlining status of the edge.  */
  initialize_inline_failed (edge);
}
1239
1240
/* Update or remove the corresponding cgraph edge of NODE if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
   of OLD_STMT if it was previously a call statement.  */

static void
cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
					gimple old_stmt, tree old_call, gimple new_stmt)
{
  tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;

  /* Both old and new statements are indirect (or non-) calls: there is
     nothing to update.  */
  if (!new_call && !old_call)
    return;
  /* See if we turned indirect call into direct call or folded call to one
     builtin into different builtin.  */
  if (old_call != new_call)
    {
      struct cgraph_edge *e = cgraph_edge (node, old_stmt);
      struct cgraph_edge *ne = NULL;
      gcov_type count;
      int frequency;
      int loop_nest;

      if (e)
	{
	  /* See if the edge is already there and has the correct callee.  It
	     might be so because of indirect inlining has already updated
	     it.  We also might've cloned and redirected the edge.  Walk up
	     the clone_of chain so a clone of the new callee also counts as
	     correct.  */
	  if (new_call && e->callee)
	    {
	      struct cgraph_node *callee = e->callee;
	      while (callee)
		{
		  if (callee->decl == new_call
		      || callee->former_clone_of == new_call)
		    return;
		  callee = callee->clone_of;
		}
	    }

	  /* Otherwise remove edge and create new one; we can't simply redirect
	     since function has changed, so inline plan and other information
	     attached to edge is invalid.  Preserve the profile data.  */
	  count = e->count;
	  frequency = e->frequency;
	  loop_nest = e->loop_nest;
	  cgraph_remove_edge (e);
	}
      else
	{
	  /* We are seeing new direct call; compute profile info based on BB.  */
	  basic_block bb = gimple_bb (new_stmt);
	  count = bb->count;
	  frequency = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
	  loop_nest = bb->loop_depth;
	}

      if (new_call)
	{
	  ne = cgraph_create_edge (node, cgraph_node (new_call),
				   new_stmt, count, frequency,
				   loop_nest);
	  /* A freshly created edge must not already be considered inlined.  */
	  gcc_assert (ne->inline_failed);
	}
    }
  /* We only updated the call stmt; update pointer in cgraph edge.  */
  else if (old_stmt != new_stmt)
    cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
}
1311
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
   of OLD_STMT before it was updated (updating can happen inplace).
   The update is applied to the node of the current function and to
   every node in its clone tree.  */

void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
  struct cgraph_node *orig = cgraph_node (cfun->decl);
  struct cgraph_node *node;

  cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
  /* Pre-order walk of the clone tree rooted at ORIG: descend into
     clones first, then siblings, then climb back up via clone_of until
     a next sibling exists or we reach ORIG again.  */
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
1340
1341
1342 /* Remove all callees from the node. */
1343
1344 void
1345 cgraph_node_remove_callees (struct cgraph_node *node)
1346 {
1347 struct cgraph_edge *e, *f;
1348
1349 /* It is sufficient to remove the edges from the lists of callers of
1350 the callees. The callee list of the node can be zapped with one
1351 assignment. */
1352 for (e = node->callees; e; e = f)
1353 {
1354 f = e->next_callee;
1355 cgraph_call_edge_removal_hooks (e);
1356 if (!e->indirect_unknown_callee)
1357 cgraph_edge_remove_callee (e);
1358 cgraph_free_edge (e);
1359 }
1360 for (e = node->indirect_calls; e; e = f)
1361 {
1362 f = e->next_callee;
1363 cgraph_call_edge_removal_hooks (e);
1364 if (!e->indirect_unknown_callee)
1365 cgraph_edge_remove_callee (e);
1366 cgraph_free_edge (e);
1367 }
1368 node->indirect_calls = NULL;
1369 node->callees = NULL;
1370 if (node->call_site_hash)
1371 {
1372 htab_delete (node->call_site_hash);
1373 node->call_site_hash = NULL;
1374 }
1375 }
1376
1377 /* Remove all callers from the node. */
1378
1379 static void
1380 cgraph_node_remove_callers (struct cgraph_node *node)
1381 {
1382 struct cgraph_edge *e, *f;
1383
1384 /* It is sufficient to remove the edges from the lists of callees of
1385 the callers. The caller list of the node can be zapped with one
1386 assignment. */
1387 for (e = node->callers; e; e = f)
1388 {
1389 f = e->next_caller;
1390 cgraph_call_edge_removal_hooks (e);
1391 cgraph_edge_remove_caller (e);
1392 cgraph_free_edge (e);
1393 }
1394 node->callers = NULL;
1395 }
1396
/* Release memory used to represent body of function NODE: SSA and CFG
   data, value histograms, the gimple body and the struct function
   itself.  DECL_INITIAL is replaced by error_mark_node unless the node
   is abstract and needed for debug info.  */

void
cgraph_release_function_body (struct cgraph_node *node)
{
  if (DECL_STRUCT_FUNCTION (node->decl))
    {
      tree old_decl = current_function_decl;
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      if (cfun->gimple_df)
	{
	  /* delete_tree_ssa operates on the current function, so
	     temporarily pretend to be compiling NODE.  */
	  current_function_decl = node->decl;
	  delete_tree_ssa ();
	  delete_tree_cfg_annotations ();
	  cfun->eh = NULL;
	  current_function_decl = old_decl;
	}
      if (cfun->cfg)
	{
	  /* Dominator info must have been freed by now.  */
	  gcc_assert (dom_computed[0] == DOM_NONE);
	  gcc_assert (dom_computed[1] == DOM_NONE);
	  clear_edges ();
	}
      if (cfun->value_histograms)
	free_histograms ();
      gcc_assert (!current_loops);
      pop_cfun();
      gimple_set_body (node->decl, NULL);
      VEC_free (ipa_opt_pass, heap,
		node->ipa_transforms_to_apply);
      /* Struct function hangs a lot of data that would leak if we didn't
	 remove all pointers to it.  */
      ggc_free (DECL_STRUCT_FUNCTION (node->decl));
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
    }
  DECL_SAVED_TREE (node->decl) = NULL;
  /* If the node is abstract and needed, then do not clear DECL_INITIAL
     of its associated function declaration because it's
     needed to emit debug info later.  */
  if (!node->abstract_and_needed)
    DECL_INITIAL (node->decl) = error_mark_node;
}
1439
/* Remove same body alias NODE: unlink it from its same_body list and from
   the cgraph and assembler-name hash tables, then recycle the node on the
   free list.  */

void
cgraph_remove_same_body_alias (struct cgraph_node *node)
{
  void **slot;
  int uid = node->uid;

  gcc_assert (node->same_body_alias);
  /* Unlink from the doubly-linked same_body list; the list head lives in
     the node the alias belongs to (node->same_body).  */
  if (node->previous)
    node->previous->next = node->next;
  else
    node->same_body->same_body = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    htab_clear_slot (cgraph_hash, slot);
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      /* The slot may hold a different node with the same name.  */
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  The uid is preserved, as in cgraph_free_edge.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1477
/* Remove the node from cgraph: drop all its edges and references, unlink
   it from the node list, the nesting tree, the clone tree, the hash
   tables and any comdat group, possibly reorganizing the clone tree
   around a surviving inline clone, and finally recycle the node.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;
  struct cgraph_node *n;
  int uid = node->uid;

  cgraph_call_node_removal_hooks (node);
  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_refering (&node->ref_list);
  VEC_free (ipa_opt_pass, heap,
	    node->ipa_transforms_to_apply);

  /* Incremental inlining access removed nodes stored in the postorder list.
     */
  node->needed = node->reachable = false;
  /* Detach nested functions from NODE and NODE from its own origin.  */
  for (n = node->nested; n; n = n->next_nested)
    n->origin = NULL;
  node->nested = NULL;
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
	node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  /* Unlink from the global doubly-linked list of cgraph nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      struct cgraph_node *next_inline_clone;

      /* Look for an inline clone sharing NODE's decl.  */
      for (next_inline_clone = node->clones;
	   next_inline_clone && next_inline_clone->decl != node->decl;
	   next_inline_clone = next_inline_clone->next_sibling_clone)
	;

      /* If there is inline clone of the node being removed, we need
	 to put it into the position of removed node and reorganize all
	 other clones to be based on it.  */
      if (next_inline_clone)
	{
	  struct cgraph_node *n;
	  struct cgraph_node *new_clones;

	  *slot = next_inline_clone;

	  /* Unlink inline clone from the list of clones of removed node.  */
	  if (next_inline_clone->next_sibling_clone)
	    next_inline_clone->next_sibling_clone->prev_sibling_clone
	      = next_inline_clone->prev_sibling_clone;
	  if (next_inline_clone->prev_sibling_clone)
	    {
	      gcc_assert (node->clones != next_inline_clone);
	      next_inline_clone->prev_sibling_clone->next_sibling_clone
		= next_inline_clone->next_sibling_clone;
	    }
	  else
	    {
	      gcc_assert (node->clones == next_inline_clone);
	      node->clones = next_inline_clone->next_sibling_clone;
	    }

	  new_clones = node->clones;
	  node->clones = NULL;

	  /* Copy clone info.  */
	  next_inline_clone->clone = node->clone;

	  /* Now place it into clone tree at same level at NODE.  */
	  next_inline_clone->clone_of = node->clone_of;
	  next_inline_clone->prev_sibling_clone = NULL;
	  next_inline_clone->next_sibling_clone = NULL;
	  if (node->clone_of)
	    {
	      if (node->clone_of->clones)
		node->clone_of->clones->prev_sibling_clone = next_inline_clone;
	      next_inline_clone->next_sibling_clone = node->clone_of->clones;
	      node->clone_of->clones = next_inline_clone;
	    }

	  /* Merge the clone list: append NODE's remaining clones to the
	     clones of NEXT_INLINE_CLONE.  */
	  if (new_clones)
	    {
	      if (!next_inline_clone->clones)
		next_inline_clone->clones = new_clones;
	      else
		{
		  n = next_inline_clone->clones;
		  while (n->next_sibling_clone)
		    n = n->next_sibling_clone;
		  n->next_sibling_clone = new_clones;
		  new_clones->prev_sibling_clone = n;
		}
	    }

	  /* Update clone_of pointers.  */
	  n = new_clones;
	  while (n)
	    {
	      n->clone_of = next_inline_clone;
	      n = n->next_sibling_clone;
	    }
	}
      else
	{
	  /* No inline clone survives; the decl leaves the hash table and
	     the body may be reclaimed.  */
	  htab_clear_slot (cgraph_hash, slot);
	  kill_body = true;
	}

    }
  /* Unlink NODE itself from the clone tree.  */
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else if (node->clone_of)
    node->clone_of->clones = node->next_sibling_clone;
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->clones)
    {
      struct cgraph_node *n, *next;

      if (node->clone_of)
	{
	  /* Splice NODE's clones into its parent's clone list.  */
	  for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
	    n->clone_of = node->clone_of;
	  n->clone_of = node->clone_of;
	  n->next_sibling_clone = node->clone_of->clones;
	  if (node->clone_of->clones)
	    node->clone_of->clones->prev_sibling_clone = n;
	  node->clone_of->clones = node->clones;
	}
      else
	{
	  /* We are removing node with clones.  This makes clones inconsistent,
	     but assume they will be removed subsequently and just keep clone
	     tree intact.  This can happen in unreachable function removal since
	     we remove unreachable functions in random order, not by bottom-up
	     walk of clone trees.  */
	  for (n = node->clones; n; n = next)
	    {
	      next = n->next_sibling_clone;
	      n->next_sibling_clone = NULL;
	      n->prev_sibling_clone = NULL;
	      n->clone_of = NULL;
	    }
	}
    }

  while (node->same_body)
    cgraph_remove_same_body_alias (node->same_body);

  /* Unlink from the circular same_comdat_group list.  */
  if (node->same_comdat_group)
    {
      struct cgraph_node *prev;
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      if (node->same_comdat_group == prev)
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }

  /* While all the clones are removed after being proceeded, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the case.
     */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = (struct cgraph_node *) *slot;
      if (!n->clones && !n->clone_of && !n->global.inlined_to
	  && (cgraph_global_info_ready
	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
		  || n->in_other_partition)))
	kill_body = true;
    }
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      /* Inline clones are not hashed.  */
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  if (kill_body)
    cgraph_release_function_body (node);
  node->decl = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
  cgraph_n_nodes--;

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1697
1698 /* Remove the node from cgraph. */
1699
1700 void
1701 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1702 {
1703 struct cgraph_edge *e, *next;
1704 for (e = node->callees; e; e = next)
1705 {
1706 next = e->next_callee;
1707 if (!e->inline_failed)
1708 cgraph_remove_node_and_inline_clones (e->callee);
1709 }
1710 cgraph_remove_node (node);
1711 }
1712
1713 /* Notify finalize_compilation_unit that given node is reachable. */
1714
1715 void
1716 cgraph_mark_reachable_node (struct cgraph_node *node)
1717 {
1718 if (!node->reachable && node->local.finalized)
1719 {
1720 if (cgraph_global_info_ready)
1721 {
1722 /* Verify that function does not appear to be needed out of blue
1723 during the optimization process. This can happen for extern
1724 inlines when bodies was removed after inlining. */
1725 gcc_assert ((node->analyzed || node->in_other_partition
1726 || DECL_EXTERNAL (node->decl)));
1727 }
1728 else
1729 notice_global_symbol (node->decl);
1730 node->reachable = 1;
1731
1732 node->next_needed = cgraph_nodes_queue;
1733 cgraph_nodes_queue = node;
1734 }
1735 }
1736
1737 /* Likewise indicate that a node is needed, i.e. reachable via some
1738 external means. */
1739
1740 void
1741 cgraph_mark_needed_node (struct cgraph_node *node)
1742 {
1743 node->needed = 1;
1744 gcc_assert (!node->global.inlined_to);
1745 cgraph_mark_reachable_node (node);
1746 }
1747
1748 /* Likewise indicate that a node is having address taken. */
1749
1750 void
1751 cgraph_mark_address_taken_node (struct cgraph_node *node)
1752 {
1753 gcc_assert (!node->global.inlined_to);
1754 cgraph_mark_reachable_node (node);
1755 node->address_taken = 1;
1756 }
1757
1758 /* Return local info for the compiled function. */
1759
1760 struct cgraph_local_info *
1761 cgraph_local_info (tree decl)
1762 {
1763 struct cgraph_node *node;
1764
1765 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1766 node = cgraph_node (decl);
1767 return &node->local;
1768 }
1769
1770 /* Return local info for the compiled function. */
1771
1772 struct cgraph_global_info *
1773 cgraph_global_info (tree decl)
1774 {
1775 struct cgraph_node *node;
1776
1777 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1778 node = cgraph_node (decl);
1779 return &node->global;
1780 }
1781
1782 /* Return local info for the compiled function. */
1783
1784 struct cgraph_rtl_info *
1785 cgraph_rtl_info (tree decl)
1786 {
1787 struct cgraph_node *node;
1788
1789 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1790 node = cgraph_node (decl);
1791 if (decl != current_function_decl
1792 && !TREE_ASM_WRITTEN (node->decl))
1793 return NULL;
1794 return &node->rtl;
1795 }
1796
/* Return a string describing the failure REASON.  The table of strings
   is generated from cif-code.def by redefining DEFCIFCODE to expand to
   the message only.  */

const char*
cgraph_inline_failed_string (cgraph_inline_failed_t reason)
{
#undef DEFCIFCODE
#define DEFCIFCODE(code, string) string,

  static const char *cif_string_table[CIF_N_REASONS] = {
#include "cif-code.def"
  };

  /* Signedness of an enum type is implementation defined, so cast it
     to unsigned before testing.  */
  gcc_assert ((unsigned) reason < CIF_N_REASONS);
  return cif_string_table[reason];
}
1814
1815 /* Return name of the node used in debug output. */
1816 const char *
1817 cgraph_node_name (struct cgraph_node *node)
1818 {
1819 return lang_hooks.decl_printable_name (node->decl, 2);
1820 }
1821
/* Names used to print out the availability enum; indexed by the value
   returned from cgraph_function_body_availability.  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};
1825
1826
/* Dump call graph node NODE to file F: its identity, flags and inline
   summary, then its caller and callee edges, IPA references, indirect
   call count and same-body aliases/thunks.  */

void
dump_cgraph_node (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *edge;
  int indirect_calls_count = 0;

  /* Identity: name/uid(pid) plus address and assembler name.  */
  fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
	   node->pid);
  dump_addr (f, " @", (void *)node);
  if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
  if (node->global.inlined_to)
    fprintf (f, " (inline copy in %s/%i)",
	     cgraph_node_name (node->global.inlined_to),
	     node->global.inlined_to->uid);
  if (node->same_comdat_group)
    fprintf (f, " (same comdat group as %s/%i)",
	     cgraph_node_name (node->same_comdat_group),
	     node->same_comdat_group->uid);
  if (node->clone_of)
    fprintf (f, " (clone of %s/%i)",
	     cgraph_node_name (node->clone_of),
	     node->clone_of->uid);
  if (cgraph_function_flags_ready)
    fprintf (f, " availability:%s",
	     cgraph_availability_names [cgraph_function_body_availability (node)]);
  if (node->analyzed)
    fprintf (f, " analyzed");
  if (node->in_other_partition)
    fprintf (f, " in_other_partition");
  /* Profile and inline-summary data.  */
  if (node->count)
    fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
	     (HOST_WIDEST_INT)node->count);
  if (node->local.inline_summary.self_time)
    fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
	     node->local.inline_summary.time_inlining_benefit);
  if (node->global.time && node->global.time
      != node->local.inline_summary.self_time)
    fprintf (f, " (%i after inlining)", node->global.time);
  if (node->local.inline_summary.self_size)
    fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
	     node->local.inline_summary.size_inlining_benefit);
  if (node->global.size && node->global.size
      != node->local.inline_summary.self_size)
    fprintf (f, " (%i after inlining)", node->global.size);
  if (node->local.inline_summary.estimated_self_stack_size)
    fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
  if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
    fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
  /* Miscellaneous flags.  */
  if (node->origin)
    fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
  if (node->needed)
    fprintf (f, " needed");
  if (node->address_taken)
    fprintf (f, " address_taken");
  else if (node->reachable)
    fprintf (f, " reachable");
  else if (node->reachable_from_other_partition)
    fprintf (f, " reachable_from_other_partition");
  if (gimple_has_body_p (node->decl))
    fprintf (f, " body");
  if (node->process)
    fprintf (f, " process");
  if (node->local.local)
    fprintf (f, " local");
  if (node->local.externally_visible)
    fprintf (f, " externally_visible");
  if (node->resolution != LDPR_UNKNOWN)
    fprintf (f, " %s",
	     ld_plugin_symbol_resolution_names[(int)node->resolution]);
  if (node->local.finalized)
    fprintf (f, " finalized");
  if (node->local.disregard_inline_limits)
    fprintf (f, " always_inline");
  else if (node->local.inlinable)
    fprintf (f, " inlinable");
  else if (node->local.versionable)
    fprintf (f, " versionable");
  if (node->local.redefined_extern_inline)
    fprintf (f, " redefined_extern_inline");
  if (TREE_ASM_WRITTEN (node->decl))
    fprintf (f, " asm_written");
  if (node->only_called_at_startup)
    fprintf (f, " only_called_at_startup");
  if (node->only_called_at_exit)
    fprintf (f, " only_called_at_exit");

  /* Incoming edges.  */
  fprintf (f, "\n called by: ");
  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
	       edge->caller->uid);
      if (edge->count)
	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
		 (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
	fprintf (f, "(%.2f per call) ",
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->indirect_inlining_edge)
	fprintf(f, "(indirect_inlining) ");
      if (edge->can_throw_external)
	fprintf(f, "(can throw external) ");
    }

  /* Outgoing direct edges.  */
  fprintf (f, "\n calls: ");
  for (edge = node->callees; edge; edge = edge->next_callee)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
	       edge->callee->uid);
      if (!edge->inline_failed)
	fprintf(f, "(inlined) ");
      if (edge->indirect_inlining_edge)
	fprintf(f, "(indirect_inlining) ");
      if (edge->count)
	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
		 (HOST_WIDEST_INT)edge->count);
      if (edge->frequency)
	fprintf (f, "(%.2f per call) ",
		 edge->frequency / (double)CGRAPH_FREQ_BASE);
      if (edge->loop_nest)
	fprintf (f, "(nested in %i loops) ", edge->loop_nest);
      if (edge->can_throw_external)
	fprintf(f, "(can throw external) ");
    }
  fprintf (f, "\n");
  fprintf (f, " References: ");
  ipa_dump_references (f, &node->ref_list);
  fprintf (f, " Refering this function: ");
  ipa_dump_refering (f, &node->ref_list);

  for (edge = node->indirect_calls; edge; edge = edge->next_callee)
    indirect_calls_count++;
  if (indirect_calls_count)
    fprintf (f, " has %i outgoing edges for indirect calls.\n",
	     indirect_calls_count);

  /* Same-body aliases and thunks.  */
  if (node->same_body)
    {
      struct cgraph_node *n;
      fprintf (f, " aliases & thunks:");
      for (n = node->same_body; n; n = n->next)
	{
	  fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
	  if (n->thunk.thunk_p)
	    {
	      fprintf (f, " (thunk of %s fixed offset %i virtual value %i has "
		       "virtual offset %i",
		       lang_hooks.decl_printable_name (n->thunk.alias, 2),
		       (int)n->thunk.fixed_offset,
		       (int)n->thunk.virtual_value,
		       (int)n->thunk.virtual_offset_p);
	      fprintf (f, ")");
	    }
	  if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
	    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
	}
      fprintf (f, "\n");
    }
}
1990
1991
1992 /* Dump call graph node NODE to stderr. */
1993
1994 DEBUG_FUNCTION void
1995 debug_cgraph_node (struct cgraph_node *node)
1996 {
1997 dump_cgraph_node (stderr, node);
1998 }
1999
2000
2001 /* Dump the callgraph to file F. */
2002
2003 void
2004 dump_cgraph (FILE *f)
2005 {
2006 struct cgraph_node *node;
2007
2008 fprintf (f, "callgraph:\n\n");
2009 for (node = cgraph_nodes; node; node = node->next)
2010 dump_cgraph_node (f, node);
2011 }
2012
2013
2014 /* Dump the call graph to stderr. */
2015
2016 DEBUG_FUNCTION void
2017 debug_cgraph (void)
2018 {
2019 dump_cgraph (stderr);
2020 }
2021
2022
/* Set the DECL_ASSEMBLER_NAME of DECL to NAME and update the cgraph
   assembler-name hash table accordingly (remove the entry for the old
   name, insert one for the new).  Warns if DECL was already referenced
   in assembly under its old name.  */

void
change_decl_assembler_name (tree decl, tree name)
{
  struct cgraph_node *node;
  void **slot;
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    SET_DECL_ASSEMBLER_NAME (decl, name);
  else
    {
      /* Nothing to do if the name does not actually change.  */
      if (name == DECL_ASSEMBLER_NAME (decl))
	return;

      /* Drop the hash entry under the old assembler name.  */
      if (assembler_name_hash
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && (node = cgraph_get_node_or_alias (decl)) != NULL)
	{
	  tree old_name = DECL_ASSEMBLER_NAME (decl);
	  slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
					   decl_assembler_name_hash (old_name),
					   NO_INSERT);
	  /* Inline clones are not hashed.  */
	  if (slot && *slot == node)
	    htab_clear_slot (assembler_name_hash, slot);
	}
      if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
	  && DECL_RTL_SET_P (decl))
	warning (0, "%D renamed after being referenced in assembly", decl);

      SET_DECL_ASSEMBLER_NAME (decl, name);
    }
  /* Insert the node under the new assembler name.  */
  if (assembler_name_hash
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_get_node_or_alias (decl)) != NULL)
    {
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       INSERT);
      gcc_assert (!*slot);
      *slot = node;
    }
}
2066
2067 /* Add a top-level asm statement to the list. */
2068
2069 struct cgraph_asm_node *
2070 cgraph_add_asm_node (tree asm_str)
2071 {
2072 struct cgraph_asm_node *node;
2073
2074 node = ggc_alloc_cleared_cgraph_asm_node ();
2075 node->asm_str = asm_str;
2076 node->order = cgraph_order++;
2077 node->next = NULL;
2078 if (cgraph_asm_nodes == NULL)
2079 cgraph_asm_nodes = node;
2080 else
2081 cgraph_asm_last_node->next = node;
2082 cgraph_asm_last_node = node;
2083 return node;
2084 }
2085
2086 /* Return true when the DECL can possibly be inlined. */
2087 bool
2088 cgraph_function_possibly_inlined_p (tree decl)
2089 {
2090 if (!cgraph_global_info_ready)
2091 return !DECL_UNINLINABLE (decl);
2092 return DECL_POSSIBLY_INLINED (decl);
2093 }
2094
/* Create clone of E in the node N represented by CALL_STMT in the callgraph.

   STMT_UID is stored as the LTO statement uid of the new edge (used when
   CALL_STMT is not available).  COUNT_SCALE scales E's profile count
   relative to REG_BR_PROB_BASE; FREQ_SCALE scales its frequency relative
   to CGRAPH_FREQ_BASE; LOOP_NEST is added to E's loop nest.  When
   UPDATE_ORIGINAL is true the cloned count is subtracted from E.
   Returns the new edge.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
		   int freq_scale, int loop_nest, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      /* If the cloned call statement now calls a known function, the
	 indirect edge becomes a direct one.  */
      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
	{
	  struct cgraph_node *callee = cgraph_node (decl);
	  new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
					 e->loop_nest + loop_nest);
	}
      else
	{
	  new_edge = cgraph_create_indirect_edge (n, call_stmt,
						  e->indirect_info->ecf_flags,
						  count, freq,
						  e->loop_nest + loop_nest);
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
				     e->loop_nest + loop_nest);
      /* A direct edge may still carry indirect info; copy it into a
	 freshly allocated structure of its own.  */
      if (e->indirect_info)
	{
	  new_edge->indirect_info
	    = ggc_alloc_cleared_cgraph_indirect_call_info ();
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  if (update_original)
    {
      /* Move the cloned portion of the count out of E, clamping at zero.  */
      e->count -= new_edge->count;
      if (e->count < 0)
	e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
2158
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.
   The new clone will have decl set to DECL that may or may not be the same
   as decl of N.

   FREQ and LOOP_NEST are passed to cgraph_clone_edge for each cloned edge;
   REDIRECT_CALLERS lists existing edges whose callee is rewritten to the
   clone.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
		   int loop_nest, bool update_original,
		   VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_node = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  /* Copy relevant state from the original node; clones are always local
     and never externally visible.  */
  new_node->decl = decl;
  new_node->origin = n->origin;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = n->analyzed;
  new_node->local = n->local;
  new_node->local.externally_visible = false;
  new_node->local.local = true;
  new_node->local.vtable_method = false;
  new_node->global = n->global;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->frequency = n->frequency;
  new_node->clone = n->clone;
  new_node->clone.tree_map = 0;
  /* Compute the scale applied to edge counts: the fraction of N's
     execution taken over by the clone, capped at REG_BR_PROB_BASE.  */
  if (n->count)
    {
      if (new_node->count > n->count)
        count_scale = REG_BR_PROB_BASE;
      else
        count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
    }
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
	n->count = 0;
    }

  FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_node);
    }


  /* Clone all outgoing edges, both direct and indirect.  */
  for (e = n->callees;e; e=e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);

  for (e = n->indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);
  ipa_clone_references (new_node, NULL, &n->ref_list);

  /* Link the clone into N's list of clones.  */
  new_node->next_sibling_clone = n->clones;
  if (n->clones)
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  cgraph_call_node_duplication_hooks (n, new_node);
  /* Clones sharing N's decl are not registered in the hash tables;
     clones with their own decl must be.  */
  if (n->decl != decl)
    {
      struct cgraph_node **slot;
      slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
      gcc_assert (!*slot);
      *slot = new_node;
      if (assembler_name_hash)
	{
	  void **aslot;
	  tree name = DECL_ASSEMBLER_NAME (decl);

	  aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					    decl_assembler_name_hash (name),
					    INSERT);
	  gcc_assert (!*aslot);
	  *aslot = new_node;
	}
    }
  return new_node;
}
2255
2256 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier. */
2257
2258 static GTY(()) unsigned int clone_fn_id_num;
2259
2260 tree
2261 clone_function_name (tree decl, const char *suffix)
2262 {
2263 tree name = DECL_ASSEMBLER_NAME (decl);
2264 size_t len = IDENTIFIER_LENGTH (name);
2265 char *tmp_name, *prefix;
2266
2267 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2268 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2269 strcpy (prefix + len + 1, suffix);
2270 #ifndef NO_DOT_IN_LABEL
2271 prefix[len] = '.';
2272 #elif !defined NO_DOLLAR_IN_LABEL
2273 prefix[len] = '$';
2274 #else
2275 prefix[len] = '_';
2276 #endif
2277 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2278 return get_identifier (tmp_name);
2279 }
2280
/* Create callgraph node clone with new declaration.  The actual body will
   be copied later at compilation stage.

   OLD_NODE is the node being cloned; REDIRECT_CALLERS are edges whose
   callee is rewritten to the clone; TREE_MAP lists argument replacements;
   ARGS_TO_SKIP marks arguments dropped from the signature (may be NULL);
   SUFFIX is appended to the clone's name.

   TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
   */
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
			     VEC(cgraph_edge_p,heap) *redirect_callers,
			     VEC(ipa_replace_map_p,gc) *tree_map,
			     bitmap args_to_skip,
			     const char * suffix)
{
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  size_t i;
  struct ipa_replace_map *map;

  /* During WPA the bodies are not available, so versionability cannot be
     verified here.  */
  if (!flag_wpa)
    gcc_checking_assert (tree_versionable_function_p (old_decl));

  gcc_assert (old_node->local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;

  /* Generate a new name for the new version. */
  DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
				CGRAPH_FREQ_BASE, 0, false,
				redirect_callers);
  /* Update the properties.
     Make clone visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_node->decl) = 0;
  if (DECL_ONE_ONLY (old_decl))
    DECL_SECTION_NAME (new_node->decl) = NULL;
  DECL_COMDAT_GROUP (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;
  FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
    {
      tree var = map->new_tree;

      STRIP_NOPS (var);
      /* Only replacements by the address of a function or variable need
	 an IPA reference recorded.  */
      if (TREE_CODE (var) != ADDR_EXPR)
	continue;
      var = get_base_var (var);
      if (!var)
	continue;

      /* Record references of the future statement initializing the constant
	 argument.  */
      if (TREE_CODE (var) == FUNCTION_DECL)
	ipa_record_reference (new_node, NULL, cgraph_node (var),
			      NULL, IPA_REF_ADDR, NULL);
      else if (TREE_CODE (var) == VAR_DECL)
	ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
			      IPA_REF_ADDR, NULL);
    }
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
  else if (old_node->clone.combined_args_to_skip)
    {
      /* OLD_NODE already skips some arguments of the ultimate original
	 function; ARGS_TO_SKIP is expressed in OLD_NODE's reduced
	 signature, so translate its bits back into original argument
	 indices and merge the two bitmaps.  */
      int newi = 0, oldi = 0;
      tree arg;
      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
      struct cgraph_node *orig_node;
      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
	;
      for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
	{
	  if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
	    {
	      bitmap_set_bit (new_args_to_skip, oldi);
	      continue;
	    }
	  if (bitmap_bit_p (args_to_skip, newi))
	    bitmap_set_bit (new_args_to_skip, oldi);
	  newi++;
	}
      new_node->clone.combined_args_to_skip = new_args_to_skip;
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;
  new_node->local.externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
  new_node->reachable = true;


  return new_node;
}
2387
2388 /* NODE is no longer nested function; update cgraph accordingly. */
2389 void
2390 cgraph_unnest_node (struct cgraph_node *node)
2391 {
2392 struct cgraph_node **node2 = &node->origin->nested;
2393 gcc_assert (node->origin);
2394
2395 while (*node2 != node)
2396 node2 = &(*node2)->next_nested;
2397 *node2 = node->next_nested;
2398 node->origin = NULL;
2399 }
2400
2401 /* Return function availability. See cgraph.h for description of individual
2402 return values. */
2403 enum availability
2404 cgraph_function_body_availability (struct cgraph_node *node)
2405 {
2406 enum availability avail;
2407 gcc_assert (cgraph_function_flags_ready);
2408 if (!node->analyzed)
2409 avail = AVAIL_NOT_AVAILABLE;
2410 else if (node->local.local)
2411 avail = AVAIL_LOCAL;
2412 else if (!node->local.externally_visible)
2413 avail = AVAIL_AVAILABLE;
2414 /* Inline functions are safe to be analyzed even if their symbol can
2415 be overwritten at runtime. It is not meaningful to enforce any sane
2416 behaviour on replacing inline function by different body. */
2417 else if (DECL_DECLARED_INLINE_P (node->decl))
2418 avail = AVAIL_AVAILABLE;
2419
2420 /* If the function can be overwritten, return OVERWRITABLE. Take
2421 care at least of two notable extensions - the COMDAT functions
2422 used to share template instantiations in C++ (this is symmetric
2423 to code cp_cannot_inline_tree_fn and probably shall be shared and
2424 the inlinability hooks completely eliminated).
2425
2426 ??? Does the C++ one definition rule allow us to always return
2427 AVAIL_AVAILABLE here? That would be good reason to preserve this
2428 bit. */
2429
2430 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2431 avail = AVAIL_OVERWRITABLE;
2432 else avail = AVAIL_AVAILABLE;
2433
2434 return avail;
2435 }
2436
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.

   LOWERED says whether FNDECL has already been through the lowering
   passes.  What must happen before enqueueing depends on how far overall
   compilation (cgraph_state) has progressed.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  struct cgraph_node *node;
  switch (cgraph_state)
    {
      case CGRAPH_STATE_CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_node (fndecl);
	node->next_needed = cgraph_new_nodes;
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes = node;
        break;

      case CGRAPH_STATE_IPA:
      case CGRAPH_STATE_IPA_SSA:
      case CGRAPH_STATE_EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_node (fndecl);
	node->local.local = false;
	node->local.finalized = true;
	node->reachable = node->needed = true;
	/* During expansion the function must additionally be lowered and
	   run through the early local passes before it can be queued.  */
	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	  {
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    current_function_decl = fndecl;
	    gimple_register_cfg_hooks ();
	    tree_lowering_passes (fndecl);
	    bitmap_obstack_initialize (NULL);
	    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      execute_pass_list (pass_early_local_passes.pass.sub);
	    bitmap_obstack_release (NULL);
	    pop_cfun ();
	    current_function_decl = NULL;

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	node->next_needed = cgraph_new_nodes;
	cgraph_new_nodes = node;
        break;

      case CGRAPH_STATE_FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	current_function_decl = fndecl;
	gimple_register_cfg_hooks ();
	if (!lowered)
          tree_lowering_passes (fndecl);
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  execute_pass_list (pass_early_local_passes.pass.sub);
	bitmap_obstack_release (NULL);
	tree_rest_of_compilation (fndecl);
	pop_cfun ();
	current_function_decl = NULL;
	break;
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
2518
2519 /* Return true if NODE can be made local for API change.
2520 Extern inline functions and C++ COMDAT functions can be made local
2521 at the expense of possible code size growth if function is used in multiple
2522 compilation units. */
2523 bool
2524 cgraph_node_can_be_local_p (struct cgraph_node *node)
2525 {
2526 return (!node->needed && !node->address_taken
2527 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2528 || !node->local.externally_visible));
2529 }
2530
/* Make DECL local.  FIXME: We shouldn't need to mess with rtl this early,
   but other code such as notice_global_symbol generates rtl.

   Clears COMDAT/weak/external/public flags on DECL and, when RTL already
   exists, updates its SYMBOL_REF flags to match.  Under WPA the symbol
   may also be renamed to avoid clashes with a non-hidden copy in another
   partition.  */
void
cgraph_make_decl_local (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == VAR_DECL)
    DECL_COMMON (decl) = 0;
  else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (DECL_COMDAT (decl))
    {
      /* It is possible that we are linking against library defining same COMDAT
	 function.  To avoid conflict we need to rename our local name of the
	 function just in the case WHOPR partitioning decide to make it hidden
	 to avoid cross partition references.  */
      if (flag_wpa)
	{
	  const char *old_name;

	  old_name  = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
	  if (TREE_CODE (decl) == FUNCTION_DECL)
	    {
	      struct cgraph_node *node = cgraph_get_node_or_alias (decl);
	      change_decl_assembler_name (decl,
					  clone_function_name (decl, "local"));
	      /* Record the rename so the LTO symbol tables stay in sync.  */
	      if (node->local.lto_file_data)
		lto_record_renamed_decl (node->local.lto_file_data,
					 old_name,
					 IDENTIFIER_POINTER
					 (DECL_ASSEMBLER_NAME (decl)));
	    }
	  else if (TREE_CODE (decl) == VAR_DECL)
	    {
	      struct varpool_node *vnode = varpool_get_node (decl);
	      /* change_decl_assembler_name will warn here on vtables because
		 C++ frontend still sets TREE_SYMBOL_REFERENCED on them.  */
	      SET_DECL_ASSEMBLER_NAME (decl,
				       clone_function_name (decl, "local"));
	      if (vnode->lto_file_data)
		lto_record_renamed_decl (vnode->lto_file_data,
					 old_name,
					 IDENTIFIER_POINTER
					 (DECL_ASSEMBLER_NAME (decl)));
	    }
	}
      DECL_SECTION_NAME (decl) = 0;
      DECL_COMDAT (decl) = 0;
    }
  DECL_COMDAT_GROUP (decl) = 0;
  DECL_WEAK (decl) = 0;
  DECL_EXTERNAL (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  /* Without RTL there is nothing more to update.  */
  if (!DECL_RTL_SET_P (decl))
    return;

  /* Update rtl flags.  */
  make_decl_rtl (decl);

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl))
    return;

  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  /* DECL_WEAK was cleared above, so this clears the weak bit on the
     symbol as well.  */
  SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
}
2601
2602 /* Bring NODE local. */
2603 void
2604 cgraph_make_node_local (struct cgraph_node *node)
2605 {
2606 gcc_assert (cgraph_node_can_be_local_p (node));
2607 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2608 {
2609 struct cgraph_node *alias;
2610 cgraph_make_decl_local (node->decl);
2611
2612 for (alias = node->same_body; alias; alias = alias->next)
2613 cgraph_make_decl_local (alias->decl);
2614
2615 node->local.externally_visible = false;
2616 node->local.local = true;
2617 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2618 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2619 }
2620 }
2621
2622 /* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
2623 if any to NOTHROW. */
2624
2625 void
2626 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2627 {
2628 struct cgraph_node *alias;
2629 TREE_NOTHROW (node->decl) = nothrow;
2630 for (alias = node->same_body; alias; alias = alias->next)
2631 TREE_NOTHROW (alias->decl) = nothrow;
2632 }
2633
2634 /* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
2635 if any to READONLY. */
2636
2637 void
2638 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2639 {
2640 struct cgraph_node *alias;
2641 /* Static constructors and destructors without a side effect can be
2642 optimized out. */
2643 if (!looping && readonly)
2644 {
2645 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2646 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2647 if (DECL_STATIC_DESTRUCTOR (node->decl))
2648 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2649 }
2650 TREE_READONLY (node->decl) = readonly;
2651 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2652 for (alias = node->same_body; alias; alias = alias->next)
2653 {
2654 TREE_READONLY (alias->decl) = readonly;
2655 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping;
2656 }
2657 }
2658
2659 /* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
2660 if any to PURE. */
2661
2662 void
2663 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2664 {
2665 struct cgraph_node *alias;
2666 /* Static constructors and destructors without a side effect can be
2667 optimized out. */
2668 if (!looping && pure)
2669 {
2670 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2671 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2672 if (DECL_STATIC_DESTRUCTOR (node->decl))
2673 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2674 }
2675 DECL_PURE_P (node->decl) = pure;
2676 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2677 for (alias = node->same_body; alias; alias = alias->next)
2678 {
2679 DECL_PURE_P (alias->decl) = pure;
2680 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping;
2681 }
2682 }
2683
/* See if the frequency of NODE can be updated based on frequencies of its
   callers.

   Examines every caller edge and derives whether NODE is unlikely
   executed, executed once, only called at startup, or only called at
   exit; returns true when any of NODE's flags changed.  Only local
   functions are considered, since all callers of a local function are
   visible.  */
bool
cgraph_propagate_frequency (struct cgraph_node *node)
{
  bool maybe_unlikely_executed = true, maybe_executed_once = true;
  bool only_called_at_startup = true;
  bool only_called_at_exit = true;
  bool changed = false;
  struct cgraph_edge *edge;

  if (!node->local.local)
    return false;
  gcc_assert (node->analyzed);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));

  /* Walk the callers; stop early once every hypothesis has been
     disproved.  */
  for (edge = node->callers;
       edge && (maybe_unlikely_executed || maybe_executed_once
	        || only_called_at_startup || only_called_at_exit);
       edge = edge->next_caller)
    {
      /* Self-recursive edges do not constrain startup/exit placement.  */
      if (edge->caller != node)
	{
          only_called_at_startup &= edge->caller->only_called_at_startup;
	  /* It makes sense to put main() together with the static constructors.
	     It will be executed for sure, but rest of functions called from
	     main are definitely not at startup only.  */
	  if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
	    only_called_at_startup = 0;
          only_called_at_exit &= edge->caller->only_called_at_exit;
	}
      /* Edges with zero frequency tell nothing about execution counts.  */
      if (!edge->frequency)
	continue;
      switch (edge->caller->frequency)
        {
	case NODE_FREQUENCY_UNLIKELY_EXECUTED:
	  break;
	case NODE_FREQUENCY_EXECUTED_ONCE:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Called by %s that is executed once\n",
		     cgraph_node_name (node));
	  maybe_unlikely_executed = false;
	  /* A call inside a loop of a function executed once may itself
	     run many times.  */
	  if (edge->loop_nest)
	    {
	      maybe_executed_once = false;
	      if (dump_file && (dump_flags & TDF_DETAILS))
	        fprintf (dump_file, "  Called in loop\n");
	    }
	  break;
	case NODE_FREQUENCY_HOT:
	case NODE_FREQUENCY_NORMAL:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Called by %s that is normal or hot\n",
		     cgraph_node_name (node));
	  maybe_unlikely_executed = false;
	  maybe_executed_once = false;
	  break;
	}
    }
  /* Startup and exit are mutually exclusive promotions.  */
  if ((only_called_at_startup && !only_called_at_exit)
      && !node->only_called_at_startup)
    {
       node->only_called_at_startup = true;
       if (dump_file)
         fprintf (dump_file, "Node %s promoted to only called at startup.\n",
		  cgraph_node_name (node));
       changed = true;
    }
  if ((only_called_at_exit && !only_called_at_startup)
      && !node->only_called_at_exit)
    {
       node->only_called_at_exit = true;
       if (dump_file)
         fprintf (dump_file, "Node %s promoted to only called at exit.\n",
		  cgraph_node_name (node));
       changed = true;
    }
  /* These come either from profile or user hints; never update them.  */
  if (node->frequency == NODE_FREQUENCY_HOT
      || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
    return changed;
  if (maybe_unlikely_executed)
    {
      node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
      if (dump_file)
	fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
		 cgraph_node_name (node));
      changed = true;
    }
  else if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
    {
      node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
      if (dump_file)
	fprintf (dump_file, "Node %s promoted to executed once.\n",
		 cgraph_node_name (node));
      changed = true;
    }
  return changed;
}
2784
2785 /* Return true when NODE can not return or throw and thus
2786 it is safe to ignore its side effects for IPA analysis. */
2787
2788 bool
2789 cgraph_node_cannot_return (struct cgraph_node *node)
2790 {
2791 int flags = flags_from_decl_or_type (node->decl);
2792 if (!flag_exceptions)
2793 return (flags & ECF_NORETURN) != 0;
2794 else
2795 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2796 == (ECF_NORETURN | ECF_NOTHROW));
2797 }
2798
2799 /* Return true when call of E can not lead to return from caller
2800 and thus it is safe to ignore its side effects for IPA analysis
2801 when computing side effects of the caller.
2802 FIXME: We could actually mark all edges that have no reaching
2803 patch to EXIT_BLOCK_PTR or throw to get better results. */
2804 bool
2805 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2806 {
2807 if (cgraph_node_cannot_return (e->caller))
2808 return true;
2809 if (e->indirect_unknown_callee)
2810 {
2811 int flags = e->indirect_info->ecf_flags;
2812 if (!flag_exceptions)
2813 return (flags & ECF_NORETURN) != 0;
2814 else
2815 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2816 == (ECF_NORETURN | ECF_NOTHROW));
2817 }
2818 else
2819 return cgraph_node_cannot_return (e->callee);
2820 }
2821
2822 /* Return true when function NODE can be removed from callgraph
2823 if all direct calls are eliminated. */
2824
2825 bool
2826 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2827 {
2828 gcc_assert (!node->global.inlined_to);
2829 /* Extern inlines can always go, we will use the external definition. */
2830 if (DECL_EXTERNAL (node->decl))
2831 return true;
2832 /* When function is needed, we can not remove it. */
2833 if (node->needed || node->reachable_from_other_partition)
2834 return false;
2835 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2836 || DECL_STATIC_DESTRUCTOR (node->decl))
2837 return false;
2838 /* Only COMDAT functions can be removed if externally visible. */
2839 if (node->local.externally_visible
2840 && (!DECL_COMDAT (node->decl)
2841 || cgraph_used_from_object_file_p (node)))
2842 return false;
2843 return true;
2844 }
2845
2846 /* Return true when function NODE can be expected to be removed
2847 from program when direct calls in this compilation unit are removed.
2848
2849 As a special case COMDAT functions are
2850 cgraph_can_remove_if_no_direct_calls_p while the are not
2851 cgraph_only_called_directly_p (it is possible they are called from other
2852 unit)
2853
2854 This function behaves as cgraph_only_called_directly_p because eliminating
2855 all uses of COMDAT function does not make it necessarily disappear from
2856 the program unless we are compiling whole program or we do LTO. In this
2857 case we know we win since dynamic linking will not really discard the
2858 linkonce section. */
2859
2860 bool
2861 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2862 {
2863 gcc_assert (!node->global.inlined_to);
2864 if (cgraph_used_from_object_file_p (node))
2865 return false;
2866 if (!in_lto_p && !flag_whole_program)
2867 return cgraph_only_called_directly_p (node);
2868 else
2869 {
2870 if (DECL_EXTERNAL (node->decl))
2871 return true;
2872 return cgraph_can_remove_if_no_direct_calls_p (node);
2873 }
2874 }
2875
2876 /* Return true when RESOLUTION indicate that linker will use
2877 the symbol from non-LTO object files. */
2878
2879 bool
2880 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2881 {
2882 return (resolution == LDPR_PREVAILING_DEF
2883 || resolution == LDPR_PREEMPTED_REG
2884 || resolution == LDPR_RESOLVED_EXEC
2885 || resolution == LDPR_RESOLVED_DYN);
2886 }
2887
2888 /* Return true when NODE is known to be used from other (non-LTO) object file.
2889 Known only when doing LTO via linker plugin. */
2890
2891 bool
2892 cgraph_used_from_object_file_p (struct cgraph_node *node)
2893 {
2894 struct cgraph_node *alias;
2895
2896 gcc_assert (!node->global.inlined_to);
2897 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
2898 return false;
2899 if (resolution_used_from_other_file_p (node->resolution))
2900 return true;
2901 for (alias = node->same_body; alias; alias = alias->next)
2902 if (TREE_PUBLIC (alias->decl)
2903 && resolution_used_from_other_file_p (alias->resolution))
2904 return true;
2905 return false;
2906 }
2907
2908 #include "gt-cgraph.h"