* gimple-fold.c (gimple_fold_stmt_to_constant_1): Avoid warning.
[gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains basic routines manipulating call graph
23
24 The callgraph:
25
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
29
30 The call-graph consist of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
32
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
46 added there.
47
48 Interprocedural information:
49
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
74
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101 #include "ipa-inline.h"
102 #include "cfgloop.h"
103
104 const char * const ld_plugin_symbol_resolution_names[]=
105 {
106 "",
107 "undef",
108 "prevailing_def",
109 "prevailing_def_ironly",
110 "preempted_reg",
111 "preempted_ir",
112 "resolved_ir",
113 "resolved_exec",
114 "resolved_dyn",
115 "prevailing_def_ironly_exp"
116 };
117
118 static void cgraph_node_remove_callers (struct cgraph_node *node);
119 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
120 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
121
122 /* Hash table used to convert declarations into nodes. */
123 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
124 /* Hash table used to convert assembler names into nodes. */
125 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
126
127 /* The linked list of cgraph nodes. */
128 struct cgraph_node *cgraph_nodes;
129
130 /* Queue of cgraph nodes scheduled to be lowered. */
131 struct cgraph_node *cgraph_nodes_queue;
132
133 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
134 secondary queue used during optimization to accommodate passes that
135 may generate new functions that need to be optimized and expanded. */
136 struct cgraph_node *cgraph_new_nodes;
137
138 /* Number of nodes in existence. */
139 int cgraph_n_nodes;
140
141 /* Maximal uid used in cgraph nodes. */
142 int cgraph_max_uid;
143
144 /* Maximal uid used in cgraph edges. */
145 int cgraph_edge_max_uid;
146
147 /* Set when whole unit has been analyzed so we can access global info. */
148 bool cgraph_global_info_ready = false;
149
150 /* What state callgraph is in right now. */
151 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
152
153 /* Set when the cgraph is fully build and the basic flags are computed. */
154 bool cgraph_function_flags_ready = false;
155
156 /* Linked list of cgraph asm nodes. */
157 struct cgraph_asm_node *cgraph_asm_nodes;
158
159 /* Last node in cgraph_asm_nodes. */
160 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
161
162 /* The order index of the next cgraph node to be created. This is
163 used so that we can sort the cgraph nodes in order by when we saw
164 them, to support -fno-toplevel-reorder. */
165 int cgraph_order;
166
167 /* List of hooks triggered on cgraph_edge events. */
168 struct cgraph_edge_hook_list {
169 cgraph_edge_hook hook;
170 void *data;
171 struct cgraph_edge_hook_list *next;
172 };
173
174 /* List of hooks triggered on cgraph_node events. */
175 struct cgraph_node_hook_list {
176 cgraph_node_hook hook;
177 void *data;
178 struct cgraph_node_hook_list *next;
179 };
180
181 /* List of hooks triggered on events involving two cgraph_edges. */
182 struct cgraph_2edge_hook_list {
183 cgraph_2edge_hook hook;
184 void *data;
185 struct cgraph_2edge_hook_list *next;
186 };
187
188 /* List of hooks triggered on events involving two cgraph_nodes. */
189 struct cgraph_2node_hook_list {
190 cgraph_2node_hook hook;
191 void *data;
192 struct cgraph_2node_hook_list *next;
193 };
194
195 /* List of hooks triggered when an edge is removed. */
196 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
197 /* List of hooks triggered when a node is removed. */
198 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
199 /* List of hooks triggered when an edge is duplicated. */
200 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
201 /* List of hooks triggered when a node is duplicated. */
202 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
203 /* List of hooks triggered when an function is inserted. */
204 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
205
206 /* Head of a linked list of unused (freed) call graph nodes.
207 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
208 static GTY(()) struct cgraph_node *free_nodes;
209 /* Head of a linked list of unused (freed) call graph edges.
210 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
211 static GTY(()) struct cgraph_edge *free_edges;
212
213 /* Did procss_same_body_aliases run? */
214 bool same_body_aliases_done;
215
216 /* Macros to access the next item in the list of free cgraph nodes and
217 edges. */
218 #define NEXT_FREE_NODE(NODE) (NODE)->next
219 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
220
221 /* Register HOOK to be called with DATA on each removed edge. */
222 struct cgraph_edge_hook_list *
223 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
224 {
225 struct cgraph_edge_hook_list *entry;
226 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
227
228 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
229 entry->hook = hook;
230 entry->data = data;
231 entry->next = NULL;
232 while (*ptr)
233 ptr = &(*ptr)->next;
234 *ptr = entry;
235 return entry;
236 }
237
238 /* Remove ENTRY from the list of hooks called on removing edges. */
239 void
240 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
241 {
242 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
243
244 while (*ptr != entry)
245 ptr = &(*ptr)->next;
246 *ptr = entry->next;
247 free (entry);
248 }
249
250 /* Call all edge removal hooks. */
251 static void
252 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
253 {
254 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
255 while (entry)
256 {
257 entry->hook (e, entry->data);
258 entry = entry->next;
259 }
260 }
261
262 /* Register HOOK to be called with DATA on each removed node. */
263 struct cgraph_node_hook_list *
264 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
265 {
266 struct cgraph_node_hook_list *entry;
267 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
268
269 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
270 entry->hook = hook;
271 entry->data = data;
272 entry->next = NULL;
273 while (*ptr)
274 ptr = &(*ptr)->next;
275 *ptr = entry;
276 return entry;
277 }
278
279 /* Remove ENTRY from the list of hooks called on removing nodes. */
280 void
281 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
282 {
283 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
284
285 while (*ptr != entry)
286 ptr = &(*ptr)->next;
287 *ptr = entry->next;
288 free (entry);
289 }
290
291 /* Call all node removal hooks. */
292 static void
293 cgraph_call_node_removal_hooks (struct cgraph_node *node)
294 {
295 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
296 while (entry)
297 {
298 entry->hook (node, entry->data);
299 entry = entry->next;
300 }
301 }
302
303 /* Register HOOK to be called with DATA on each inserted node. */
304 struct cgraph_node_hook_list *
305 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
306 {
307 struct cgraph_node_hook_list *entry;
308 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
309
310 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
311 entry->hook = hook;
312 entry->data = data;
313 entry->next = NULL;
314 while (*ptr)
315 ptr = &(*ptr)->next;
316 *ptr = entry;
317 return entry;
318 }
319
320 /* Remove ENTRY from the list of hooks called on inserted nodes. */
321 void
322 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
323 {
324 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
325
326 while (*ptr != entry)
327 ptr = &(*ptr)->next;
328 *ptr = entry->next;
329 free (entry);
330 }
331
332 /* Call all node insertion hooks. */
333 void
334 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
335 {
336 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
337 while (entry)
338 {
339 entry->hook (node, entry->data);
340 entry = entry->next;
341 }
342 }
343
344 /* Register HOOK to be called with DATA on each duplicated edge. */
345 struct cgraph_2edge_hook_list *
346 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
347 {
348 struct cgraph_2edge_hook_list *entry;
349 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
350
351 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
352 entry->hook = hook;
353 entry->data = data;
354 entry->next = NULL;
355 while (*ptr)
356 ptr = &(*ptr)->next;
357 *ptr = entry;
358 return entry;
359 }
360
361 /* Remove ENTRY from the list of hooks called on duplicating edges. */
362 void
363 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
364 {
365 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
366
367 while (*ptr != entry)
368 ptr = &(*ptr)->next;
369 *ptr = entry->next;
370 free (entry);
371 }
372
373 /* Call all edge duplication hooks. */
374 static void
375 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
376 struct cgraph_edge *cs2)
377 {
378 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
379 while (entry)
380 {
381 entry->hook (cs1, cs2, entry->data);
382 entry = entry->next;
383 }
384 }
385
386 /* Register HOOK to be called with DATA on each duplicated node. */
387 struct cgraph_2node_hook_list *
388 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
389 {
390 struct cgraph_2node_hook_list *entry;
391 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
392
393 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
394 entry->hook = hook;
395 entry->data = data;
396 entry->next = NULL;
397 while (*ptr)
398 ptr = &(*ptr)->next;
399 *ptr = entry;
400 return entry;
401 }
402
403 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
404 void
405 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
406 {
407 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
408
409 while (*ptr != entry)
410 ptr = &(*ptr)->next;
411 *ptr = entry->next;
412 free (entry);
413 }
414
415 /* Call all node duplication hooks. */
416 void
417 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
418 struct cgraph_node *node2)
419 {
420 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
421 while (entry)
422 {
423 entry->hook (node1, node2, entry->data);
424 entry = entry->next;
425 }
426 }
427
428 /* Returns a hash code for P. */
429
430 static hashval_t
431 hash_node (const void *p)
432 {
433 const struct cgraph_node *n = (const struct cgraph_node *) p;
434 return (hashval_t) DECL_UID (n->decl);
435 }
436
437
438 /* Returns nonzero if P1 and P2 are equal. */
439
440 static int
441 eq_node (const void *p1, const void *p2)
442 {
443 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
444 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
445 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
446 }
447
448 /* Allocate new callgraph node. */
449
450 static inline struct cgraph_node *
451 cgraph_allocate_node (void)
452 {
453 struct cgraph_node *node;
454
455 if (free_nodes)
456 {
457 node = free_nodes;
458 free_nodes = NEXT_FREE_NODE (node);
459 }
460 else
461 {
462 node = ggc_alloc_cleared_cgraph_node ();
463 node->uid = cgraph_max_uid++;
464 }
465
466 return node;
467 }
468
469 /* Allocate new callgraph node and insert it into basic data structures. */
470
471 static struct cgraph_node *
472 cgraph_create_node_1 (void)
473 {
474 struct cgraph_node *node = cgraph_allocate_node ();
475
476 node->next = cgraph_nodes;
477 node->order = cgraph_order++;
478 if (cgraph_nodes)
479 cgraph_nodes->previous = node;
480 node->previous = NULL;
481 node->frequency = NODE_FREQUENCY_NORMAL;
482 node->count_materialization_scale = REG_BR_PROB_BASE;
483 ipa_empty_ref_list (&node->ref_list);
484 cgraph_nodes = node;
485 cgraph_n_nodes++;
486 return node;
487 }
488
489 /* Return cgraph node assigned to DECL. Create new one when needed. */
490
491 struct cgraph_node *
492 cgraph_create_node (tree decl)
493 {
494 struct cgraph_node key, *node, **slot;
495
496 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
497
498 if (!cgraph_hash)
499 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
500
501 key.decl = decl;
502 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
503 gcc_assert (!*slot);
504
505 node = cgraph_create_node_1 ();
506 node->decl = decl;
507 *slot = node;
508 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
509 {
510 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
511 node->next_nested = node->origin->nested;
512 node->origin->nested = node;
513 }
514 if (assembler_name_hash)
515 {
516 void **aslot;
517 tree name = DECL_ASSEMBLER_NAME (decl);
518
519 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
520 decl_assembler_name_hash (name),
521 INSERT);
522 /* We can have multiple declarations with same assembler name. For C++
523 it is __builtin_strlen and strlen, for instance. Do we need to
524 record them all? Original implementation marked just first one
525 so lets hope for the best. */
526 if (*aslot == NULL)
527 *aslot = node;
528 }
529 return node;
530 }
531
532 /* Try to find a call graph node for declaration DECL and if it does not exist,
533 create it. */
534
535 struct cgraph_node *
536 cgraph_get_create_node (tree decl)
537 {
538 struct cgraph_node *node;
539
540 node = cgraph_get_node (decl);
541 if (node)
542 return node;
543
544 return cgraph_create_node (decl);
545 }
546
547 /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
548 the function body is associated with (not neccesarily cgraph_node (DECL). */
549
550 struct cgraph_node *
551 cgraph_create_function_alias (tree alias, tree decl)
552 {
553 struct cgraph_node *alias_node;
554
555 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
556 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
557 alias_node = cgraph_get_create_node (alias);
558 gcc_assert (!alias_node->local.finalized);
559 alias_node->thunk.alias = decl;
560 alias_node->local.finalized = true;
561 alias_node->alias = 1;
562
563 if ((TREE_PUBLIC (alias) && !DECL_COMDAT (alias) && !DECL_EXTERNAL (alias))
564 || (DECL_VIRTUAL_P (alias)
565 && (DECL_COMDAT (alias) || DECL_EXTERNAL (alias))))
566 cgraph_mark_reachable_node (alias_node);
567 return alias_node;
568 }
569
570 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
571 and NULL otherwise.
572 Same body aliases are output whenever the body of DECL is output,
573 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
574
575 struct cgraph_node *
576 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
577 {
578 struct cgraph_node *n;
579 #ifndef ASM_OUTPUT_DEF
580 /* If aliases aren't supported by the assembler, fail. */
581 return NULL;
582 #endif
583 /* Langhooks can create same body aliases of symbols not defined.
584 Those are useless. Drop them on the floor. */
585 if (cgraph_global_info_ready)
586 return NULL;
587
588 n = cgraph_create_function_alias (alias, decl);
589 n->same_body_alias = true;
590 if (same_body_aliases_done)
591 ipa_record_reference (n, NULL, cgraph_get_node (decl), NULL, IPA_REF_ALIAS,
592 NULL);
593 return n;
594 }
595
596 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
597 aliases DECL with an adjustments made into the first parameter.
598 See comments in thunk_adjust for detail on the parameters. */
599
600 struct cgraph_node *
601 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
602 tree alias, tree decl,
603 bool this_adjusting,
604 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
605 tree virtual_offset,
606 tree real_alias)
607 {
608 struct cgraph_node *node;
609
610 node = cgraph_get_node (alias);
611 if (node)
612 {
613 gcc_assert (node->local.finalized);
614 gcc_assert (!node->alias);
615 gcc_assert (!node->thunk.thunk_p);
616 cgraph_remove_node (node);
617 }
618
619 node = cgraph_create_node (alias);
620 gcc_checking_assert (!virtual_offset
621 || double_int_equal_p
622 (tree_to_double_int (virtual_offset),
623 shwi_to_double_int (virtual_value)));
624 node->thunk.fixed_offset = fixed_offset;
625 node->thunk.this_adjusting = this_adjusting;
626 node->thunk.virtual_value = virtual_value;
627 node->thunk.virtual_offset_p = virtual_offset != NULL;
628 node->thunk.alias = real_alias;
629 node->thunk.thunk_p = true;
630 node->local.finalized = true;
631
632 if (cgraph_decide_is_function_needed (node, decl))
633 cgraph_mark_needed_node (node);
634
635 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
636 || (DECL_VIRTUAL_P (decl)
637 && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
638 cgraph_mark_reachable_node (node);
639
640 return node;
641 }
642
643 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
644 is assigned. */
645
646 struct cgraph_node *
647 cgraph_get_node (const_tree decl)
648 {
649 struct cgraph_node key, *node = NULL, **slot;
650
651 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
652
653 if (!cgraph_hash)
654 return NULL;
655
656 key.decl = CONST_CAST2 (tree, const_tree, decl);
657
658 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
659 NO_INSERT);
660
661 if (slot && *slot)
662 node = *slot;
663 return node;
664 }
665
666 /* Insert already constructed node into hashtable. */
667
668 void
669 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
670 {
671 struct cgraph_node **slot;
672
673 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
674
675 gcc_assert (!*slot);
676 *slot = node;
677 }
678
679 /* Returns a hash code for P. */
680
681 static hashval_t
682 hash_node_by_assembler_name (const void *p)
683 {
684 const struct cgraph_node *n = (const struct cgraph_node *) p;
685 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
686 }
687
688 /* Returns nonzero if P1 and P2 are equal. */
689
690 static int
691 eq_assembler_name (const void *p1, const void *p2)
692 {
693 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
694 const_tree name = (const_tree)p2;
695 return (decl_assembler_name_equal (n1->decl, name));
696 }
697
698 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
699 Return NULL if there's no such node. */
700
701 struct cgraph_node *
702 cgraph_node_for_asm (tree asmname)
703 {
704 struct cgraph_node *node;
705 void **slot;
706
707 if (!assembler_name_hash)
708 {
709 assembler_name_hash =
710 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
711 NULL);
712 for (node = cgraph_nodes; node; node = node->next)
713 if (!node->global.inlined_to)
714 {
715 tree name = DECL_ASSEMBLER_NAME (node->decl);
716 slot = htab_find_slot_with_hash (assembler_name_hash, name,
717 decl_assembler_name_hash (name),
718 INSERT);
719 /* We can have multiple declarations with same assembler name. For C++
720 it is __builtin_strlen and strlen, for instance. Do we need to
721 record them all? Original implementation marked just first one
722 so lets hope for the best. */
723 if (!*slot)
724 *slot = node;
725 }
726 }
727
728 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
729 decl_assembler_name_hash (asmname),
730 NO_INSERT);
731
732 if (slot)
733 {
734 node = (struct cgraph_node *) *slot;
735 return node;
736 }
737 return NULL;
738 }
739
740 /* Returns a hash value for X (which really is a die_struct). */
741
742 static hashval_t
743 edge_hash (const void *x)
744 {
745 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
746 }
747
748 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
749
750 static int
751 edge_eq (const void *x, const void *y)
752 {
753 return ((const struct cgraph_edge *) x)->call_stmt == y;
754 }
755
756 /* Add call graph edge E to call site hash of its caller. */
757
758 static inline void
759 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
760 {
761 void **slot;
762 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
763 e->call_stmt,
764 htab_hash_pointer (e->call_stmt),
765 INSERT);
766 gcc_assert (!*slot);
767 *slot = e;
768 }
769
770 /* Return the callgraph edge representing the GIMPLE_CALL statement
771 CALL_STMT. */
772
773 struct cgraph_edge *
774 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
775 {
776 struct cgraph_edge *e, *e2;
777 int n = 0;
778
779 if (node->call_site_hash)
780 return (struct cgraph_edge *)
781 htab_find_with_hash (node->call_site_hash, call_stmt,
782 htab_hash_pointer (call_stmt));
783
784 /* This loop may turn out to be performance problem. In such case adding
785 hashtables into call nodes with very many edges is probably best
786 solution. It is not good idea to add pointer into CALL_EXPR itself
787 because we want to make possible having multiple cgraph nodes representing
788 different clones of the same body before the body is actually cloned. */
789 for (e = node->callees; e; e = e->next_callee)
790 {
791 if (e->call_stmt == call_stmt)
792 break;
793 n++;
794 }
795
796 if (!e)
797 for (e = node->indirect_calls; e; e = e->next_callee)
798 {
799 if (e->call_stmt == call_stmt)
800 break;
801 n++;
802 }
803
804 if (n > 100)
805 {
806 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
807 for (e2 = node->callees; e2; e2 = e2->next_callee)
808 cgraph_add_edge_to_call_site_hash (e2);
809 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
810 cgraph_add_edge_to_call_site_hash (e2);
811 }
812
813 return e;
814 }
815
816
817 /* Change field call_stmt of edge E to NEW_STMT. */
818
819 void
820 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
821 {
822 tree decl;
823
824 if (e->caller->call_site_hash)
825 {
826 htab_remove_elt_with_hash (e->caller->call_site_hash,
827 e->call_stmt,
828 htab_hash_pointer (e->call_stmt));
829 }
830
831 e->call_stmt = new_stmt;
832 if (e->indirect_unknown_callee
833 && (decl = gimple_call_fndecl (new_stmt)))
834 {
835 /* Constant propagation (and possibly also inlining?) can turn an
836 indirect call into a direct one. */
837 struct cgraph_node *new_callee = cgraph_get_node (decl);
838
839 gcc_checking_assert (new_callee);
840 cgraph_make_edge_direct (e, new_callee);
841 }
842
843 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
844 e->can_throw_external = stmt_can_throw_external (new_stmt);
845 pop_cfun ();
846 if (e->caller->call_site_hash)
847 cgraph_add_edge_to_call_site_hash (e);
848 }
849
850 /* Like cgraph_set_call_stmt but walk the clone tree and update all
851 clones sharing the same function body. */
852
853 void
854 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
855 gimple old_stmt, gimple new_stmt)
856 {
857 struct cgraph_node *node;
858 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
859
860 if (edge)
861 cgraph_set_call_stmt (edge, new_stmt);
862
863 node = orig->clones;
864 if (node)
865 while (node != orig)
866 {
867 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
868 if (edge)
869 cgraph_set_call_stmt (edge, new_stmt);
870 if (node->clones)
871 node = node->clones;
872 else if (node->next_sibling_clone)
873 node = node->next_sibling_clone;
874 else
875 {
876 while (node != orig && !node->next_sibling_clone)
877 node = node->clone_of;
878 if (node != orig)
879 node = node->next_sibling_clone;
880 }
881 }
882 }
883
884 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
885 same function body. If clones already have edge for OLD_STMT; only
886 update the edge same way as cgraph_set_call_stmt_including_clones does.
887
888 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
889 frequencies of the clones. */
890
891 void
892 cgraph_create_edge_including_clones (struct cgraph_node *orig,
893 struct cgraph_node *callee,
894 gimple old_stmt,
895 gimple stmt, gcov_type count,
896 int freq,
897 cgraph_inline_failed_t reason)
898 {
899 struct cgraph_node *node;
900 struct cgraph_edge *edge;
901
902 if (!cgraph_edge (orig, stmt))
903 {
904 edge = cgraph_create_edge (orig, callee, stmt, count, freq);
905 edge->inline_failed = reason;
906 }
907
908 node = orig->clones;
909 if (node)
910 while (node != orig)
911 {
912 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
913
914 /* It is possible that clones already contain the edge while
915 master didn't. Either we promoted indirect call into direct
916 call in the clone or we are processing clones of unreachable
917 master where edges has been removed. */
918 if (edge)
919 cgraph_set_call_stmt (edge, stmt);
920 else if (!cgraph_edge (node, stmt))
921 {
922 edge = cgraph_create_edge (node, callee, stmt, count,
923 freq);
924 edge->inline_failed = reason;
925 }
926
927 if (node->clones)
928 node = node->clones;
929 else if (node->next_sibling_clone)
930 node = node->next_sibling_clone;
931 else
932 {
933 while (node != orig && !node->next_sibling_clone)
934 node = node->clone_of;
935 if (node != orig)
936 node = node->next_sibling_clone;
937 }
938 }
939 }
940
941 /* Allocate a cgraph_edge structure and fill it with data according to the
942 parameters of which only CALLEE can be NULL (when creating an indirect call
943 edge). */
944
945 static struct cgraph_edge *
946 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
947 gimple call_stmt, gcov_type count, int freq)
948 {
949 struct cgraph_edge *edge;
950
951 /* LTO does not actually have access to the call_stmt since these
952 have not been loaded yet. */
953 if (call_stmt)
954 {
955 /* This is a rather expensive check possibly triggering
956 construction of call stmt hashtable. */
957 gcc_checking_assert (!cgraph_edge (caller, call_stmt));
958
959 gcc_assert (is_gimple_call (call_stmt));
960 }
961
962 if (free_edges)
963 {
964 edge = free_edges;
965 free_edges = NEXT_FREE_EDGE (edge);
966 }
967 else
968 {
969 edge = ggc_alloc_cgraph_edge ();
970 edge->uid = cgraph_edge_max_uid++;
971 }
972
973 edge->aux = NULL;
974 edge->caller = caller;
975 edge->callee = callee;
976 edge->prev_caller = NULL;
977 edge->next_caller = NULL;
978 edge->prev_callee = NULL;
979 edge->next_callee = NULL;
980
981 edge->count = count;
982 gcc_assert (count >= 0);
983 edge->frequency = freq;
984 gcc_assert (freq >= 0);
985 gcc_assert (freq <= CGRAPH_FREQ_MAX);
986
987 edge->call_stmt = call_stmt;
988 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
989 edge->can_throw_external
990 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
991 pop_cfun ();
992 if (call_stmt
993 && callee && callee->decl
994 && !gimple_check_call_matching_types (call_stmt, callee->decl))
995 edge->call_stmt_cannot_inline_p = true;
996 else
997 edge->call_stmt_cannot_inline_p = false;
998 if (call_stmt && caller->call_site_hash)
999 cgraph_add_edge_to_call_site_hash (edge);
1000
1001 edge->indirect_info = NULL;
1002 edge->indirect_inlining_edge = 0;
1003
1004 return edge;
1005 }
1006
1007 /* Create edge from CALLER to CALLEE in the cgraph. */
1008
1009 struct cgraph_edge *
1010 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1011 gimple call_stmt, gcov_type count, int freq)
1012 {
1013 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1014 count, freq);
1015
1016 edge->indirect_unknown_callee = 0;
1017 initialize_inline_failed (edge);
1018
1019 edge->next_caller = callee->callers;
1020 if (callee->callers)
1021 callee->callers->prev_caller = edge;
1022 edge->next_callee = caller->callees;
1023 if (caller->callees)
1024 caller->callees->prev_callee = edge;
1025 caller->callees = edge;
1026 callee->callers = edge;
1027
1028 return edge;
1029 }
1030
1031 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1032
1033 struct cgraph_indirect_call_info *
1034 cgraph_allocate_init_indirect_info (void)
1035 {
1036 struct cgraph_indirect_call_info *ii;
1037
1038 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1039 ii->param_index = -1;
1040 return ii;
1041 }
1042
1043 /* Create an indirect edge with a yet-undetermined callee where the call
1044 statement destination is a formal parameter of the caller with index
1045 PARAM_INDEX. */
1046
1047 struct cgraph_edge *
1048 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1049 int ecf_flags,
1050 gcov_type count, int freq)
1051 {
1052 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1053 count, freq);
1054
1055 edge->indirect_unknown_callee = 1;
1056 initialize_inline_failed (edge);
1057
1058 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1059 edge->indirect_info->ecf_flags = ecf_flags;
1060
1061 edge->next_callee = caller->indirect_calls;
1062 if (caller->indirect_calls)
1063 caller->indirect_calls->prev_callee = edge;
1064 caller->indirect_calls = edge;
1065
1066 return edge;
1067 }
1068
1069 /* Remove the edge E from the list of the callers of the callee. */
1070
1071 static inline void
1072 cgraph_edge_remove_callee (struct cgraph_edge *e)
1073 {
1074 gcc_assert (!e->indirect_unknown_callee);
1075 if (e->prev_caller)
1076 e->prev_caller->next_caller = e->next_caller;
1077 if (e->next_caller)
1078 e->next_caller->prev_caller = e->prev_caller;
1079 if (!e->prev_caller)
1080 e->callee->callers = e->next_caller;
1081 }
1082
1083 /* Remove the edge E from the list of the callees of the caller. */
1084
1085 static inline void
1086 cgraph_edge_remove_caller (struct cgraph_edge *e)
1087 {
1088 if (e->prev_callee)
1089 e->prev_callee->next_callee = e->next_callee;
1090 if (e->next_callee)
1091 e->next_callee->prev_callee = e->prev_callee;
1092 if (!e->prev_callee)
1093 {
1094 if (e->indirect_unknown_callee)
1095 e->caller->indirect_calls = e->next_callee;
1096 else
1097 e->caller->callees = e->next_callee;
1098 }
1099 if (e->caller->call_site_hash)
1100 htab_remove_elt_with_hash (e->caller->call_site_hash,
1101 e->call_stmt,
1102 htab_hash_pointer (e->call_stmt));
1103 }
1104
1105 /* Put the edge onto the free list. */
1106
1107 static void
1108 cgraph_free_edge (struct cgraph_edge *e)
1109 {
1110 int uid = e->uid;
1111
1112 /* Clear out the edge so we do not dangle pointers. */
1113 memset (e, 0, sizeof (*e));
1114 e->uid = uid;
1115 NEXT_FREE_EDGE (e) = free_edges;
1116 free_edges = e;
1117 }
1118
/* Remove the edge E in the cgraph.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  /* Call all edge removal hooks so IPA passes can drop their data.  */
  cgraph_call_edge_removal_hooks (e);

  /* Indirect edges with unknown callee are not on any callee's caller
     list, so there is nothing to unlink on that side.  */
  if (!e->indirect_unknown_callee)
    /* Remove from callers list of the callee.  */
    cgraph_edge_remove_callee (e);

  /* Remove from callees list of the callers.  */
  cgraph_edge_remove_caller (e);

  /* Put the edge onto the free list.  */
  cgraph_free_edge (e);
}
1137
1138 /* Set callee of call graph edge E and add it to the corresponding set of
1139 callers. */
1140
1141 static void
1142 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1143 {
1144 e->prev_caller = NULL;
1145 if (n->callers)
1146 n->callers->prev_caller = e;
1147 e->next_caller = n->callers;
1148 n->callers = e;
1149 e->callee = n;
1150 }
1151
/* Redirect callee of E to N.  The function does not update underlying
   call expression; updating the gimple statement is the caller's
   responsibility.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Remove from callers list of the current callee.  */
  cgraph_edge_remove_callee (e);

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (e, n);
}
1164
/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
   CALLEE.  (The stale reference to a DELTA thunk-adjustment argument was
   removed; the function takes no such parameter.)  */

void
cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
{
  edge->indirect_unknown_callee = 0;

  /* Get the edge out of the indirect edge list.  */
  if (edge->prev_callee)
    edge->prev_callee->next_callee = edge->next_callee;
  if (edge->next_callee)
    edge->next_callee->prev_callee = edge->prev_callee;
  if (!edge->prev_callee)
    edge->caller->indirect_calls = edge->next_callee;

  /* Put it into the normal callee list.  */
  edge->prev_callee = NULL;
  edge->next_callee = edge->caller->callees;
  if (edge->caller->callees)
    edge->caller->callees->prev_callee = edge;
  edge->caller->callees = edge;

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (edge, callee);

  /* Now that the callee is known, check type compatibility of the call
     site; a mismatch makes this edge uninlinable.  */
  if (edge->call_stmt)
    edge->call_stmt_cannot_inline_p
      = !gimple_check_call_matching_types (edge->call_stmt, callee->decl);

  /* We need to re-determine the inlining status of the edge.  */
  initialize_inline_failed (edge);
}
1199
1200
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
   of OLD_STMT if it was previously call statement.
   If NEW_STMT is NULL, the call has been dropped without any
   replacement.  */

static void
cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
					gimple old_stmt, tree old_call,
					gimple new_stmt)
{
  /* Callee decl of the new statement; NULL when NEW_STMT is not a direct
     call (or not a call at all).  */
  tree new_call = (new_stmt && is_gimple_call (new_stmt))
		  ? gimple_call_fndecl (new_stmt) : 0;

  /* We are seeing indirect calls, then there is nothing to update.  */
  if (!new_call && !old_call)
    return;
  /* See if we turned indirect call into direct call or folded call to one builtin
     into different builtin.  */
  if (old_call != new_call)
    {
      struct cgraph_edge *e = cgraph_edge (node, old_stmt);
      struct cgraph_edge *ne = NULL;
      gcov_type count;
      int frequency;

      if (e)
	{
	  /* See if the edge is already there and has the correct callee.  It
	     might be so because of indirect inlining has already updated
	     it.  We also might've cloned and redirected the edge.  Walk up
	     the clone_of chain since the edge may point to a clone of the
	     new callee.  */
	  if (new_call && e->callee)
	    {
	      struct cgraph_node *callee = e->callee;
	      while (callee)
		{
		  if (callee->decl == new_call
		      || callee->former_clone_of == new_call)
		    return;
		  callee = callee->clone_of;
		}
	    }

	  /* Otherwise remove edge and create new one; we can't simply redirect
	     since function has changed, so inline plan and other information
	     attached to edge is invalid.  Preserve the profile data for the
	     replacement edge.  */
	  count = e->count;
	  frequency = e->frequency;
	  cgraph_remove_edge (e);
	}
      else if (new_call)
	{
	  /* We are seeing new direct call; compute profile info based on BB.  */
	  basic_block bb = gimple_bb (new_stmt);
	  count = bb->count;
	  frequency = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
	}

      if (new_call)
	{
	  ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
				   new_stmt, count, frequency);
	  gcc_assert (ne->inline_failed);
	}
    }
  /* We only updated the call stmt; update pointer in cgraph edge.  */
  else if (old_stmt != new_stmt)
    cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
}
1271
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
   of OLD_STMT before it was updated (updating can happen inplace).  */

void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
  struct cgraph_node *orig = cgraph_get_node (cfun->decl);
  struct cgraph_node *node;

  gcc_checking_assert (orig);
  cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
  /* Walk the whole clone tree of ORIG depth-first (descend into clones,
     then move to siblings, climbing back up through clone_of) so every
     clone sharing the statement is updated as well.  */
  if (orig->clones)
    for (node = orig->clones; node != orig;)
      {
	cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* No children and no further siblings: climb until a sibling
	       exists or we are back at ORIG.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
1301
1302
/* Remove all callees from the node.  */

void
cgraph_node_remove_callees (struct cgraph_node *node)
{
  struct cgraph_edge *e, *f;

  /* It is sufficient to remove the edges from the lists of callers of
     the callees.  The callee list of the node can be zapped with one
     assignment.  */
  for (e = node->callees; e; e = f)
    {
      /* cgraph_free_edge clears E, so fetch the successor first.  */
      f = e->next_callee;
      cgraph_call_edge_removal_hooks (e);
      if (!e->indirect_unknown_callee)
	cgraph_edge_remove_callee (e);
      cgraph_free_edge (e);
    }
  /* Likewise free all outgoing indirect call edges.  */
  for (e = node->indirect_calls; e; e = f)
    {
      f = e->next_callee;
      cgraph_call_edge_removal_hooks (e);
      if (!e->indirect_unknown_callee)
	cgraph_edge_remove_callee (e);
      cgraph_free_edge (e);
    }
  node->indirect_calls = NULL;
  node->callees = NULL;
  /* The statement -> edge cache refers to the freed edges; drop it.  */
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
}
1337
1338 /* Remove all callers from the node. */
1339
1340 static void
1341 cgraph_node_remove_callers (struct cgraph_node *node)
1342 {
1343 struct cgraph_edge *e, *f;
1344
1345 /* It is sufficient to remove the edges from the lists of callees of
1346 the callers. The caller list of the node can be zapped with one
1347 assignment. */
1348 for (e = node->callers; e; e = f)
1349 {
1350 f = e->next_caller;
1351 cgraph_call_edge_removal_hooks (e);
1352 cgraph_edge_remove_caller (e);
1353 cgraph_free_edge (e);
1354 }
1355 node->callers = NULL;
1356 }
1357
/* Release memory used to represent body of function NODE.  */

void
cgraph_release_function_body (struct cgraph_node *node)
{
  if (DECL_STRUCT_FUNCTION (node->decl))
    {
      tree old_decl = current_function_decl;
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      if (cfun->cfg
	  && current_loops)
	{
	  /* The loop structures belong to the body being released.  */
	  cfun->curr_properties &= ~PROP_loops;
	  loop_optimizer_finalize ();
	}
      if (cfun->gimple_df)
	{
	  /* delete_tree_ssa operates on current_function_decl; switch to
	     the function being released and restore afterwards.  */
	  current_function_decl = node->decl;
	  delete_tree_ssa ();
	  delete_tree_cfg_annotations ();
	  cfun->eh = NULL;
	  current_function_decl = old_decl;
	}
      if (cfun->cfg)
	{
	  /* Dominator info must already be gone before edges are freed.  */
	  gcc_assert (dom_computed[0] == DOM_NONE);
	  gcc_assert (dom_computed[1] == DOM_NONE);
	  clear_edges ();
	}
      if (cfun->value_histograms)
	free_histograms ();
      pop_cfun();
      gimple_set_body (node->decl, NULL);
      VEC_free (ipa_opt_pass, heap,
		node->ipa_transforms_to_apply);
      /* Struct function hangs a lot of data that would leak if we didn't
	 remove all pointers to it.  */
      ggc_free (DECL_STRUCT_FUNCTION (node->decl));
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
    }
  DECL_SAVED_TREE (node->decl) = NULL;
  /* If the node is abstract and needed, then do not clear DECL_INITIAL
     of its associated function declaration because it's
     needed to emit debug info later.  */
  if (!node->abstract_and_needed)
    DECL_INITIAL (node->decl) = error_mark_node;
}
1405
/* Remove the node from cgraph.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;
  struct cgraph_node *n;
  int uid = node->uid;

  cgraph_call_node_removal_hooks (node);
  /* Detach NODE from the call graph in both directions and from the IPA
     reference lists.  */
  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  ipa_remove_all_references (&node->ref_list);
  ipa_remove_all_refering (&node->ref_list);
  VEC_free (ipa_opt_pass, heap,
	    node->ipa_transforms_to_apply);

  /* Incremental inlining access removed nodes stored in the postorder list.
     */
  node->needed = node->reachable = false;
  /* Orphan any functions nested inside NODE...  */
  for (n = node->nested; n; n = n->next_nested)
    n->origin = NULL;
  node->nested = NULL;
  /* ...and unlink NODE from the nested list of its own origin.  */
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
	node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  /* Unlink from the doubly linked chain of all cgraph nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      struct cgraph_node *next_inline_clone;

      /* Search NODE's direct clones for an inline clone sharing its
	 decl.  */
      for (next_inline_clone = node->clones;
	   next_inline_clone && next_inline_clone->decl != node->decl;
	   next_inline_clone = next_inline_clone->next_sibling_clone)
	;

      /* If there is inline clone of the node being removed, we need
	 to put it into the position of removed node and reorganize all
	 other clones to be based on it.  */
      if (next_inline_clone)
	{
	  struct cgraph_node *n;
	  struct cgraph_node *new_clones;

	  *slot = next_inline_clone;

	  /* Unlink inline clone from the list of clones of removed node.  */
	  if (next_inline_clone->next_sibling_clone)
	    next_inline_clone->next_sibling_clone->prev_sibling_clone
	      = next_inline_clone->prev_sibling_clone;
	  if (next_inline_clone->prev_sibling_clone)
	    {
	      gcc_assert (node->clones != next_inline_clone);
	      next_inline_clone->prev_sibling_clone->next_sibling_clone
		= next_inline_clone->next_sibling_clone;
	    }
	  else
	    {
	      gcc_assert (node->clones == next_inline_clone);
	      node->clones = next_inline_clone->next_sibling_clone;
	    }

	  new_clones = node->clones;
	  node->clones = NULL;

	  /* Copy clone info.  */
	  next_inline_clone->clone = node->clone;

	  /* Now place it into clone tree at same level at NODE.  */
	  next_inline_clone->clone_of = node->clone_of;
	  next_inline_clone->prev_sibling_clone = NULL;
	  next_inline_clone->next_sibling_clone = NULL;
	  if (node->clone_of)
	    {
	      if (node->clone_of->clones)
		node->clone_of->clones->prev_sibling_clone = next_inline_clone;
	      next_inline_clone->next_sibling_clone = node->clone_of->clones;
	      node->clone_of->clones = next_inline_clone;
	    }

	  /* Merge the clone list.  */
	  if (new_clones)
	    {
	      if (!next_inline_clone->clones)
		next_inline_clone->clones = new_clones;
	      else
		{
		  /* Append NEW_CLONES at the tail of the existing list.  */
		  n = next_inline_clone->clones;
		  while (n->next_sibling_clone)
		    n = n->next_sibling_clone;
		  n->next_sibling_clone = new_clones;
		  new_clones->prev_sibling_clone = n;
		}
	    }

	  /* Update clone_of pointers.  */
	  n = new_clones;
	  while (n)
	    {
	      n->clone_of = next_inline_clone;
	      n = n->next_sibling_clone;
	    }
	}
      else
	{
	  /* No inline clone can take over NODE's place in the decl hash;
	     drop the entry and release the body below.  */
	  htab_clear_slot (cgraph_hash, slot);
	  kill_body = true;
	}

    }
  /* Unlink NODE from whatever clone tree it is a member of.  */
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else if (node->clone_of)
    node->clone_of->clones = node->next_sibling_clone;
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->clones)
    {
      struct cgraph_node *n, *next;

      if (node->clone_of)
	{
	  /* Reparent NODE's clones onto NODE's own clone_of, splicing the
	     whole list into its sibling chain.  */
	  for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
	    n->clone_of = node->clone_of;
	  n->clone_of = node->clone_of;
	  n->next_sibling_clone = node->clone_of->clones;
	  if (node->clone_of->clones)
	    node->clone_of->clones->prev_sibling_clone = n;
	  node->clone_of->clones = node->clones;
	}
      else
	{
	  /* We are removing node with clones.  This makes clones inconsistent,
	     but assume they will be removed subsequently and just keep clone
	     tree intact.  This can happen in unreachable function removal since
	     we remove unreachable functions in random order, not by bottom-up
	     walk of clone trees.  */
	  for (n = node->clones; n; n = next)
	    {
	      next = n->next_sibling_clone;
	      n->next_sibling_clone = NULL;
	      n->prev_sibling_clone = NULL;
	      n->clone_of = NULL;
	    }
	}
    }

  if (node->same_comdat_group)
    {
      /* Remove NODE from the circular same_comdat_group ring.  */
      struct cgraph_node *prev;
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      if (node->same_comdat_group == prev)
	/* PREV and NODE were the only two members; dissolve the ring.  */
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }

  /* While all the clones are removed after being proceeded, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the case.
     */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = (struct cgraph_node *) *slot;
      if (!n->clones && !n->clone_of && !n->global.inlined_to
	  && (cgraph_global_info_ready
	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
		  || n->in_other_partition)))
	kill_body = true;
    }
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      /* Inline clones are not hashed.  */
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  if (kill_body)
    cgraph_release_function_body (node);
  node->decl = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
  cgraph_n_nodes--;

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1622
1623 /* Add NEW_ to the same comdat group that OLD is in. */
1624
1625 void
1626 cgraph_add_to_same_comdat_group (struct cgraph_node *new_,
1627 struct cgraph_node *old)
1628 {
1629 gcc_assert (DECL_ONE_ONLY (old->decl));
1630 gcc_assert (!new_->same_comdat_group);
1631 gcc_assert (new_ != old);
1632
1633 DECL_COMDAT_GROUP (new_->decl) = DECL_COMDAT_GROUP (old->decl);
1634 new_->same_comdat_group = old;
1635 if (!old->same_comdat_group)
1636 old->same_comdat_group = new_;
1637 else
1638 {
1639 struct cgraph_node *n;
1640 for (n = old->same_comdat_group;
1641 n->same_comdat_group != old;
1642 n = n->same_comdat_group)
1643 ;
1644 n->same_comdat_group = new_;
1645 }
1646 }
1647
1648 /* Remove the node from cgraph and all inline clones inlined into it.
1649 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
1650 removed. This allows to call the function from outer loop walking clone
1651 tree. */
1652
1653 bool
1654 cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
1655 {
1656 struct cgraph_edge *e, *next;
1657 bool found = false;
1658
1659 if (node == forbidden_node)
1660 return true;
1661 for (e = node->callees; e; e = next)
1662 {
1663 next = e->next_callee;
1664 if (!e->inline_failed)
1665 found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
1666 }
1667 cgraph_remove_node (node);
1668 return found;
1669 }
1670
1671 /* Notify finalize_compilation_unit that given node is reachable. */
1672
1673 void
1674 cgraph_mark_reachable_node (struct cgraph_node *node)
1675 {
1676 if (!node->reachable && node->local.finalized)
1677 {
1678 if (cgraph_global_info_ready)
1679 {
1680 /* Verify that function does not appear to be needed out of blue
1681 during the optimization process. This can happen for extern
1682 inlines when bodies was removed after inlining. */
1683 gcc_assert ((node->analyzed || node->in_other_partition
1684 || DECL_EXTERNAL (node->decl)));
1685 }
1686 else
1687 notice_global_symbol (node->decl);
1688 node->reachable = 1;
1689
1690 node->next_needed = cgraph_nodes_queue;
1691 cgraph_nodes_queue = node;
1692 }
1693 }
1694
1695 /* Likewise indicate that a node is needed, i.e. reachable via some
1696 external means. */
1697
1698 void
1699 cgraph_mark_needed_node (struct cgraph_node *node)
1700 {
1701 node->needed = 1;
1702 gcc_assert (!node->global.inlined_to);
1703 cgraph_mark_reachable_node (node);
1704 }
1705
1706 /* Likewise indicate that a node is having address taken. */
1707
1708 void
1709 cgraph_mark_address_taken_node (struct cgraph_node *node)
1710 {
1711 gcc_assert (!node->global.inlined_to);
1712 cgraph_mark_reachable_node (node);
1713 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1714 IPA_REF_ADDR reference exists (and thus it should be set on node
1715 representing alias we take address of) and as a test whether address
1716 of the object was taken (and thus it should be set on node alias is
1717 referring to). We should remove the first use and the remove the
1718 following set. */
1719 node->address_taken = 1;
1720 node = cgraph_function_or_thunk_node (node, NULL);
1721 node->address_taken = 1;
1722 }
1723
1724 /* Return local info for the compiled function. */
1725
1726 struct cgraph_local_info *
1727 cgraph_local_info (tree decl)
1728 {
1729 struct cgraph_node *node;
1730
1731 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1732 node = cgraph_get_node (decl);
1733 if (!node)
1734 return NULL;
1735 return &node->local;
1736 }
1737
1738 /* Return local info for the compiled function. */
1739
1740 struct cgraph_global_info *
1741 cgraph_global_info (tree decl)
1742 {
1743 struct cgraph_node *node;
1744
1745 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1746 node = cgraph_get_node (decl);
1747 if (!node)
1748 return NULL;
1749 return &node->global;
1750 }
1751
1752 /* Return local info for the compiled function. */
1753
1754 struct cgraph_rtl_info *
1755 cgraph_rtl_info (tree decl)
1756 {
1757 struct cgraph_node *node;
1758
1759 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1760 node = cgraph_get_node (decl);
1761 if (!node
1762 || (decl != current_function_decl
1763 && !TREE_ASM_WRITTEN (node->decl)))
1764 return NULL;
1765 return &node->rtl;
1766 }
1767
/* Return a string describing the failure REASON.  */

const char*
cgraph_inline_failed_string (cgraph_inline_failed_t reason)
{
  /* Build the message table by expanding cif-code.def so each string sits
     at the index of its cgraph_inline_failed_t code.  */
#undef DEFCIFCODE
#define DEFCIFCODE(code, string)	string,

  static const char *cif_string_table[CIF_N_REASONS] = {
#include "cif-code.def"
};

  /* Signedness of an enum type is implementation defined, so cast it
     to unsigned before testing.  */
  gcc_assert ((unsigned) reason < CIF_N_REASONS);
  return cif_string_table[reason];
}
1785
/* Return name of the node used in debug output.  */
const char *
cgraph_node_name (struct cgraph_node *node)
{
  /* Delegate to the front end's printable-name hook for NODE's decl.  */
  return lang_hooks.decl_printable_name (node->decl, 2);
}
1792
/* Names used to print out the availability enum.  dump_cgraph_node indexes
   this array with cgraph_function_body_availability's result, so the order
   must stay in sync with that availability enum.  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};
1796
1797
1798 /* Dump call graph node NODE to file F. */
1799
1800 void
1801 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1802 {
1803 struct cgraph_edge *edge;
1804 int indirect_calls_count = 0;
1805
1806 fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
1807 dump_addr (f, " @", (void *)node);
1808 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1809 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1810 if (node->global.inlined_to)
1811 fprintf (f, " (inline copy in %s/%i)",
1812 cgraph_node_name (node->global.inlined_to),
1813 node->global.inlined_to->uid);
1814 if (node->same_comdat_group)
1815 fprintf (f, " (same comdat group as %s/%i)",
1816 cgraph_node_name (node->same_comdat_group),
1817 node->same_comdat_group->uid);
1818 if (node->clone_of)
1819 fprintf (f, " (clone of %s/%i)",
1820 cgraph_node_name (node->clone_of),
1821 node->clone_of->uid);
1822 if (cgraph_function_flags_ready)
1823 fprintf (f, " availability:%s",
1824 cgraph_availability_names [cgraph_function_body_availability (node)]);
1825 if (node->analyzed)
1826 fprintf (f, " analyzed");
1827 if (node->in_other_partition)
1828 fprintf (f, " in_other_partition");
1829 if (node->count)
1830 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1831 (HOST_WIDEST_INT)node->count);
1832 if (node->origin)
1833 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1834 if (node->needed)
1835 fprintf (f, " needed");
1836 if (node->address_taken)
1837 fprintf (f, " address_taken");
1838 else if (node->reachable)
1839 fprintf (f, " reachable");
1840 else if (node->reachable_from_other_partition)
1841 fprintf (f, " reachable_from_other_partition");
1842 if (gimple_has_body_p (node->decl))
1843 fprintf (f, " body");
1844 if (node->process)
1845 fprintf (f, " process");
1846 if (node->local.local)
1847 fprintf (f, " local");
1848 if (node->local.externally_visible)
1849 fprintf (f, " externally_visible");
1850 if (node->resolution != LDPR_UNKNOWN)
1851 fprintf (f, " %s",
1852 ld_plugin_symbol_resolution_names[(int)node->resolution]);
1853 if (node->local.finalized)
1854 fprintf (f, " finalized");
1855 if (node->local.redefined_extern_inline)
1856 fprintf (f, " redefined_extern_inline");
1857 if (TREE_ASM_WRITTEN (node->decl))
1858 fprintf (f, " asm_written");
1859 if (node->only_called_at_startup)
1860 fprintf (f, " only_called_at_startup");
1861 if (node->only_called_at_exit)
1862 fprintf (f, " only_called_at_exit");
1863 else if (node->alias)
1864 fprintf (f, " alias");
1865 if (node->tm_clone)
1866 fprintf (f, " tm_clone");
1867
1868 fprintf (f, "\n");
1869
1870 if (node->thunk.thunk_p)
1871 {
1872 fprintf (f, " thunk of %s (asm: %s) fixed offset %i virtual value %i has "
1873 "virtual offset %i)\n",
1874 lang_hooks.decl_printable_name (node->thunk.alias, 2),
1875 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
1876 (int)node->thunk.fixed_offset,
1877 (int)node->thunk.virtual_value,
1878 (int)node->thunk.virtual_offset_p);
1879 }
1880 if (node->alias && node->thunk.alias)
1881 {
1882 fprintf (f, " alias of %s",
1883 lang_hooks.decl_printable_name (node->thunk.alias, 2));
1884 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
1885 fprintf (f, " (asm: %s)",
1886 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
1887 fprintf (f, "\n");
1888 }
1889
1890 fprintf (f, " called by: ");
1891
1892 for (edge = node->callers; edge; edge = edge->next_caller)
1893 {
1894 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1895 edge->caller->uid);
1896 if (edge->count)
1897 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1898 (HOST_WIDEST_INT)edge->count);
1899 if (edge->frequency)
1900 fprintf (f, "(%.2f per call) ",
1901 edge->frequency / (double)CGRAPH_FREQ_BASE);
1902 if (!edge->inline_failed)
1903 fprintf(f, "(inlined) ");
1904 if (edge->indirect_inlining_edge)
1905 fprintf(f, "(indirect_inlining) ");
1906 if (edge->can_throw_external)
1907 fprintf(f, "(can throw external) ");
1908 }
1909
1910 fprintf (f, "\n calls: ");
1911 for (edge = node->callees; edge; edge = edge->next_callee)
1912 {
1913 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1914 edge->callee->uid);
1915 if (!edge->inline_failed)
1916 fprintf(f, "(inlined) ");
1917 if (edge->indirect_inlining_edge)
1918 fprintf(f, "(indirect_inlining) ");
1919 if (edge->count)
1920 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1921 (HOST_WIDEST_INT)edge->count);
1922 if (edge->frequency)
1923 fprintf (f, "(%.2f per call) ",
1924 edge->frequency / (double)CGRAPH_FREQ_BASE);
1925 if (edge->can_throw_external)
1926 fprintf(f, "(can throw external) ");
1927 }
1928 fprintf (f, "\n");
1929 fprintf (f, " References: ");
1930 ipa_dump_references (f, &node->ref_list);
1931 fprintf (f, " Refering this function: ");
1932 ipa_dump_refering (f, &node->ref_list);
1933
1934 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1935 indirect_calls_count++;
1936 if (indirect_calls_count)
1937 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1938 indirect_calls_count);
1939 }
1940
1941
/* Dump call graph node NODE to stderr.  Convenience wrapper intended to
   be called from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph_node (struct cgraph_node *node)
{
  dump_cgraph_node (stderr, node);
}
1949
1950
1951 /* Dump the callgraph to file F. */
1952
1953 void
1954 dump_cgraph (FILE *f)
1955 {
1956 struct cgraph_node *node;
1957
1958 fprintf (f, "callgraph:\n\n");
1959 for (node = cgraph_nodes; node; node = node->next)
1960 dump_cgraph_node (f, node);
1961 }
1962
1963
/* Dump the call graph to stderr.  Convenience wrapper intended to be
   called from the debugger.  */

DEBUG_FUNCTION void
debug_cgraph (void)
{
  dump_cgraph (stderr);
}
1971
1972
/* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */

void
change_decl_assembler_name (tree decl, tree name)
{
  struct cgraph_node *node;
  void **slot;
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    SET_DECL_ASSEMBLER_NAME (decl, name);
  else
    {
      if (name == DECL_ASSEMBLER_NAME (decl))
	return;

      /* Unregister the old assembler name from the name -> node hash
	 before replacing it.  */
      if (assembler_name_hash
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && (node = cgraph_get_node (decl)) != NULL)
	{
	  tree old_name = DECL_ASSEMBLER_NAME (decl);
	  slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
					   decl_assembler_name_hash (old_name),
					   NO_INSERT);
	  /* Inline clones are not hashed.  */
	  if (slot && *slot == node)
	    htab_clear_slot (assembler_name_hash, slot);
	}
      if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
	  && DECL_RTL_SET_P (decl))
	warning (0, "%D renamed after being referenced in assembly", decl);

      SET_DECL_ASSEMBLER_NAME (decl, name);
    }
  /* Register the new name in the hash; it must not collide with an
     existing entry.  */
  if (assembler_name_hash
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_get_node (decl)) != NULL)
    {
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       INSERT);
      gcc_assert (!*slot);
      *slot = node;
    }
}
2016
2017 /* Add a top-level asm statement to the list. */
2018
2019 struct cgraph_asm_node *
2020 cgraph_add_asm_node (tree asm_str)
2021 {
2022 struct cgraph_asm_node *node;
2023
2024 node = ggc_alloc_cleared_cgraph_asm_node ();
2025 node->asm_str = asm_str;
2026 node->order = cgraph_order++;
2027 node->next = NULL;
2028 if (cgraph_asm_nodes == NULL)
2029 cgraph_asm_nodes = node;
2030 else
2031 cgraph_asm_last_node->next = node;
2032 cgraph_asm_last_node = node;
2033 return node;
2034 }
2035
2036 /* Return true when the DECL can possibly be inlined. */
2037 bool
2038 cgraph_function_possibly_inlined_p (tree decl)
2039 {
2040 if (!cgraph_global_info_ready)
2041 return !DECL_UNINLINABLE (decl);
2042 return DECL_POSSIBLY_INLINED (decl);
2043 }
2044
/* Create clone of E in the node N represented by CALL_EXPR the callgraph.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
		   int freq_scale, bool update_original)
{
  struct cgraph_edge *new_edge;
  /* Scale the profile count of the original edge; COUNT_SCALE is a
     fraction of REG_BR_PROB_BASE.  */
  gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
	{
	  /* The cloned call statement already names its callee, so the
	     clone of this indirect edge becomes a direct edge.  */
	  struct cgraph_node *callee = cgraph_get_node (decl);
	  gcc_checking_assert (callee);
	  new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
	}
      else
	{
	  /* Still indirect; copy the indirect call description.  */
	  new_edge = cgraph_create_indirect_edge (n, call_stmt,
						  e->indirect_info->ecf_flags,
						  count, freq);
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }
  else
    {
      new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
      if (e->indirect_info)
	{
	  new_edge->indirect_info
	    = ggc_alloc_cleared_cgraph_indirect_call_info ();
	  *new_edge->indirect_info = *e->indirect_info;
	}
    }

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = e->can_throw_external;
  new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
  if (update_original)
    {
      /* Part of the execution now goes through the clone, so subtract
	 its count from the original edge, never dropping below zero.  */
      e->count -= new_edge->count;
      if (e->count < 0)
	e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
2106
2107
2108 /* Create node representing clone of N executed COUNT times. Decrease
2109 the execution counts from original node too.
2110 The new clone will have decl set to DECL that may or may not be the same
2111 as decl of N.
2112
2113 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2114 function's profile to reflect the fact that part of execution is handled
2115 by node.
2116 When CALL_DUPLICATION_HOOK is true, the ipa passes are acknowledged about
2117 the new clone. Otherwise the caller is responsible for doing so later. */
2118
2119 struct cgraph_node *
2120 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2121 bool update_original,
2122 VEC(cgraph_edge_p,heap) *redirect_callers,
2123 bool call_duplication_hook)
2124 {
2125 struct cgraph_node *new_node = cgraph_create_node_1 ();
2126 struct cgraph_edge *e;
2127 gcov_type count_scale;
2128 unsigned i;
2129
2130 new_node->decl = decl;
/* Preserve nesting: register the clone on its origin's nested list.  */
2131 new_node->origin = n->origin;
2132 if (new_node->origin)
2133 {
2134 new_node->next_nested = new_node->origin->nested;
2135 new_node->origin->nested = new_node;
2136 }
2137 new_node->analyzed = n->analyzed;
2138 new_node->local = n->local;
/* Clones are never exported; they are private to this unit.  */
2139 new_node->local.externally_visible = false;
2140 new_node->local.local = true;
2141 new_node->global = n->global;
2142 new_node->rtl = n->rtl;
2143 new_node->count = count;
2144 new_node->frequency = n->frequency;
2145 new_node->clone = n->clone;
2146 new_node->clone.tree_map = 0;
/* Derive the REG_BR_PROB_BASE-based edge-count scale from the ratio of
   the clone's count to the original's; cap at 1.0.  */
2147 if (n->count)
2148 {
2149 if (new_node->count > n->count)
2150 count_scale = REG_BR_PROB_BASE;
2151 else
2152 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2153 }
2154 else
2155 count_scale = 0;
2156 if (update_original)
2157 {
2158 n->count -= count;
2159 if (n->count < 0)
2160 n->count = 0;
2161 }
2162
2163 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2164 {
2165 /* Redirect calls to the old version node to point to its new
2166 version. */
2167 cgraph_redirect_edge_callee (e, new_node);
2168 }
2169
2170
/* Duplicate all outgoing direct and indirect edges into the clone.  */
2171 for (e = n->callees;e; e=e->next_callee)
2172 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2173 count_scale, freq, update_original);
2174
2175 for (e = n->indirect_calls; e; e = e->next_callee)
2176 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2177 count_scale, freq, update_original);
2178 ipa_clone_references (new_node, NULL, &n->ref_list);
2179
/* Chain the clone into N's list of clones.  */
2180 new_node->next_sibling_clone = n->clones;
2181 if (n->clones)
2182 n->clones->prev_sibling_clone = new_node;
2183 n->clones = new_node;
2184 new_node->clone_of = n;
2185
/* Only a clone with a distinct decl gets its own entries in the
   DECL_UID and assembler-name hash tables.  */
2186 if (n->decl != decl)
2187 {
2188 struct cgraph_node **slot;
2189 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2190 gcc_assert (!*slot);
2191 *slot = new_node;
2192 if (assembler_name_hash)
2193 {
2194 void **aslot;
2195 tree name = DECL_ASSEMBLER_NAME (decl);
2196
2197 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2198 decl_assembler_name_hash (name),
2199 INSERT);
2200 gcc_assert (!*aslot);
2201 *aslot = new_node;
2202 }
2203 }
2204
2205 if (call_duplication_hook)
2206 cgraph_call_node_duplication_hooks (n, new_node);
2207 return new_node;
2208 }
2209
2210 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier. */
2211
2212 static GTY(()) unsigned int clone_fn_id_num;
2213
2214 tree
2215 clone_function_name (tree decl, const char *suffix)
2216 {
2217 tree name = DECL_ASSEMBLER_NAME (decl);
2218 size_t len = IDENTIFIER_LENGTH (name);
2219 char *tmp_name, *prefix;
2220
2221 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2222 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2223 strcpy (prefix + len + 1, suffix);
2224 #ifndef NO_DOT_IN_LABEL
2225 prefix[len] = '.';
2226 #elif !defined NO_DOLLAR_IN_LABEL
2227 prefix[len] = '$';
2228 #else
2229 prefix[len] = '_';
2230 #endif
2231 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2232 return get_identifier (tmp_name);
2233 }
2234
2235 /* Create callgraph node clone with new declaration. The actual body will
2236 be copied later at compilation stage.
2237
2238 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
2239 bitmap interface.
2240 */
/* OLD_NODE: node being cloned.  REDIRECT_CALLERS: edges to retarget at the
   clone.  TREE_MAP: replacements for (constant-propagated) arguments.
   ARGS_TO_SKIP: parameters to drop from the clone's signature, or NULL.
   SUFFIX: appended to the clone's assembler name.  */
2241 struct cgraph_node *
2242 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2243 VEC(cgraph_edge_p,heap) *redirect_callers,
2244 VEC(ipa_replace_map_p,gc) *tree_map,
2245 bitmap args_to_skip,
2246 const char * suffix)
2247 {
2248 tree old_decl = old_node->decl;
2249 struct cgraph_node *new_node = NULL;
2250 tree new_decl;
2251 size_t i;
2252 struct ipa_replace_map *map;
2253
/* During WPA the body is not around, so versionability was checked
   earlier; only assert it when we can.  */
2254 if (!flag_wpa)
2255 gcc_checking_assert (tree_versionable_function_p (old_decl));
2256
2257 gcc_assert (old_node->local.can_change_signature || !args_to_skip);
2258
2259 /* Make a new FUNCTION_DECL tree node */
2260 if (!args_to_skip)
2261 new_decl = copy_node (old_decl);
2262 else
2263 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
2264 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2265
2266 /* Generate a new name for the new version. */
2267 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2268 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2269 SET_DECL_RTL (new_decl, NULL);
2270
/* Hooks are deliberately deferred (last argument false) until the clone
   descriptors below are filled in; they run at the end of this function.  */
2271 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2272 CGRAPH_FREQ_BASE, false,
2273 redirect_callers, false);
2274 /* Update the properties.
2275 Make clone visible only within this translation unit. Make sure
2276 that is not weak also.
2277 ??? We cannot use COMDAT linkage because there is no
2278 ABI support for this. */
2279 DECL_EXTERNAL (new_node->decl) = 0;
2280 if (DECL_ONE_ONLY (old_decl))
2281 DECL_SECTION_NAME (new_node->decl) = NULL;
2282 DECL_COMDAT_GROUP (new_node->decl) = 0;
2283 TREE_PUBLIC (new_node->decl) = 0;
2284 DECL_COMDAT (new_node->decl) = 0;
2285 DECL_WEAK (new_node->decl) = 0;
2286 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
2287 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
2288 new_node->clone.tree_map = tree_map;
2289 new_node->clone.args_to_skip = args_to_skip;
2290 FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
2291 {
2292 tree var = map->new_tree;
2293
2294 STRIP_NOPS (var);
2295 if (TREE_CODE (var) != ADDR_EXPR)
2296 continue;
2297 var = get_base_var (var);
2298 if (!var)
2299 continue;
2300
2301 /* Record references of the future statement initializing the constant
2302 argument. */
2303 if (TREE_CODE (var) == FUNCTION_DECL)
2304 {
2305 struct cgraph_node *ref_node = cgraph_get_node (var);
2306 gcc_checking_assert (ref_node);
2307 ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
2308 NULL);
2309 }
2310 else if (TREE_CODE (var) == VAR_DECL)
2311 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2312 IPA_REF_ADDR, NULL);
2313 }
/* Combine the new skip set with any existing one.  OLD_NODE's
   combined set is indexed by the ultimate origin's parameters, while
   ARGS_TO_SKIP is indexed by OLD_NODE's (already reduced) parameter
   list, so translate indices while merging.  */
2314 if (!args_to_skip)
2315 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2316 else if (old_node->clone.combined_args_to_skip)
2317 {
2318 int newi = 0, oldi = 0;
2319 tree arg;
2320 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2321 struct cgraph_node *orig_node;
/* Walk up to the ultimate clone origin.  */
2322 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2323 ;
2324 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2325 {
2326 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2327 {
2328 bitmap_set_bit (new_args_to_skip, oldi);
2329 continue;
2330 }
2331 if (bitmap_bit_p (args_to_skip, newi))
2332 bitmap_set_bit (new_args_to_skip, oldi);
2333 newi++;
2334 }
2335 new_node->clone.combined_args_to_skip = new_args_to_skip;
2336 }
2337 else
2338 new_node->clone.combined_args_to_skip = args_to_skip;
2339 new_node->local.externally_visible = 0;
2340 new_node->local.local = 1;
2341 new_node->lowered = true;
2342 new_node->reachable = true;
2343
2344 cgraph_call_node_duplication_hooks (old_node, new_node);
2345
2346
2347 return new_node;
2348 }
2349
2350 /* NODE is no longer nested function; update cgraph accordingly. */
2351 void
2352 cgraph_unnest_node (struct cgraph_node *node)
2353 {
2354 struct cgraph_node **node2 = &node->origin->nested;
2355 gcc_assert (node->origin);
2356
2357 while (*node2 != node)
2358 node2 = &(*node2)->next_nested;
2359 *node2 = node->next_nested;
2360 node->origin = NULL;
2361 }
2362
2363 /* Return function availability. See cgraph.h for description of individual
2364 return values. */
2365 enum availability
2366 cgraph_function_body_availability (struct cgraph_node *node)
2367 {
2368 enum availability avail;
2369 gcc_assert (cgraph_function_flags_ready);
2370 if (!node->analyzed)
2371 avail = AVAIL_NOT_AVAILABLE;
2372 else if (node->local.local)
2373 avail = AVAIL_LOCAL;
2374 else if (!node->local.externally_visible)
2375 avail = AVAIL_AVAILABLE;
2376 /* Inline functions are safe to be analyzed even if their symbol can
2377 be overwritten at runtime. It is not meaningful to enforce any sane
2378 behaviour on replacing inline function by different body. */
2379 else if (DECL_DECLARED_INLINE_P (node->decl))
2380 avail = AVAIL_AVAILABLE;
2381
2382 /* If the function can be overwritten, return OVERWRITABLE. Take
2383 care at least of two notable extensions - the COMDAT functions
2384 used to share template instantiations in C++ (this is symmetric
2385 to code cp_cannot_inline_tree_fn and probably shall be shared and
2386 the inlinability hooks completely eliminated).
2387
2388 ??? Does the C++ one definition rule allow us to always return
2389 AVAIL_AVAILABLE here? That would be good reason to preserve this
2390 bit. */
2391
2392 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2393 avail = AVAIL_OVERWRITABLE;
2394 else avail = AVAIL_AVAILABLE;
2395
2396 return avail;
2397 }
2398
2399 /* Add the function FNDECL to the call graph.
2400 Unlike cgraph_finalize_function, this function is intended to be used
2401 by middle end and allows insertion of new function at arbitrary point
2402 of compilation. The function can be either in high, low or SSA form
2403 GIMPLE.
2404
2405 The function is assumed to be reachable and have address taken (so no
2406 API breaking optimizations are performed on it).
2407
2408 Main work done by this function is to enqueue the function for later
2409 processing to avoid need the passes to be re-entrant. */
2410
2411 void
2412 cgraph_add_new_function (tree fndecl, bool lowered)
2413 {
2414 struct cgraph_node *node;
/* How much work must be done here depends on how far compilation has
   progressed; later states require catching the new function up.  */
2415 switch (cgraph_state)
2416 {
2417 case CGRAPH_STATE_CONSTRUCTION:
2418 /* Just enqueue function to be processed at nearest occurrence. */
2419 node = cgraph_create_node (fndecl);
2420 node->next_needed = cgraph_new_nodes;
2421 if (lowered)
2422 node->lowered = true;
2423 cgraph_new_nodes = node;
2424 break;
2425
2426 case CGRAPH_STATE_IPA:
2427 case CGRAPH_STATE_IPA_SSA:
2428 case CGRAPH_STATE_EXPANSION:
2429 /* Bring the function into finalized state and enqueue for later
2430 analyzing and compilation. */
2431 node = cgraph_get_create_node (fndecl);
2432 node->local.local = false;
2433 node->local.finalized = true;
/* Assume reachable/needed; see the header comment.  */
2434 node->reachable = node->needed = true;
2435 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2436 {
/* During expansion lowering will no longer happen globally, so
   lower this one function now, within its own cfun context.  */
2437 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2438 current_function_decl = fndecl;
2439 gimple_register_cfg_hooks ();
2440 tree_lowering_passes (fndecl);
2441 bitmap_obstack_initialize (NULL);
2442 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2443 execute_pass_list (pass_early_local_passes.pass.sub);
2444 bitmap_obstack_release (NULL);
2445 pop_cfun ();
2446 current_function_decl = NULL;
2447
2448 lowered = true;
2449 }
2450 if (lowered)
2451 node->lowered = true;
2452 node->next_needed = cgraph_new_nodes;
2453 cgraph_new_nodes = node;
2454 break;
2455
2456 case CGRAPH_STATE_FINISHED:
2457 /* At the very end of compilation we have to do all the work up
2458 to expansion. */
2459 node = cgraph_create_node (fndecl);
2460 if (lowered)
2461 node->lowered = true;
2462 cgraph_analyze_function (node);
2463 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2464 current_function_decl = fndecl;
2465 gimple_register_cfg_hooks ();
2466 bitmap_obstack_initialize (NULL);
2467 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2468 execute_pass_list (pass_early_local_passes.pass.sub);
2469 bitmap_obstack_release (NULL);
2470 tree_rest_of_compilation (fndecl);
2471 pop_cfun ();
2472 current_function_decl = NULL;
2473 break;
2474 }
2475
2476 /* Set a personality if required and we already passed EH lowering. */
2477 if (lowered
2478 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2479 == eh_personality_lang))
2480 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2481 }
2482
2483 /* Worker for cgraph_node_can_be_local_p. */
2484 static bool
2485 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2486 void *data ATTRIBUTE_UNUSED)
2487 {
2488 return !(!node->needed
2489 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2490 || !node->local.externally_visible));
2491 }
2492
2493 /* Return true if NODE can be made local for API change.
2494 Extern inline functions and C++ COMDAT functions can be made local
2495 at the expense of possible code size growth if function is used in multiple
2496 compilation units. */
2497 bool
2498 cgraph_node_can_be_local_p (struct cgraph_node *node)
2499 {
2500 return (!node->address_taken
2501 && !cgraph_for_node_and_aliases (node,
2502 cgraph_node_cannot_be_local_p_1,
2503 NULL, true));
2504 }
2505
2506 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2507 but other code such as notice_global_symbol generates rtl. */
2508 void
2509 cgraph_make_decl_local (tree decl)
2510 {
2511 rtx rtl, symbol;
2512
/* Only functions and variables are expected here.  */
2513 if (TREE_CODE (decl) == VAR_DECL)
2514 DECL_COMMON (decl) = 0;
2515 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2516
2517 if (DECL_ONE_ONLY (decl) || DECL_COMDAT (decl))
2518 {
2519 /* It is possible that we are linking against library defining same COMDAT
2520 function. To avoid conflict we need to rename our local name of the
2521 function just in the case WHOPR partitioning decide to make it hidden
2522 to avoid cross partition references. */
2523 if (flag_wpa)
2524 {
2525 const char *old_name;
2526
/* Remember the pre-rename assembler name so the LTO machinery can map
   references from the input files to the new name.  */
2527 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2528 if (TREE_CODE (decl) == FUNCTION_DECL)
2529 {
2530 struct cgraph_node *node = cgraph_get_node (decl);
2531 change_decl_assembler_name (decl,
2532 clone_function_name (decl, "local"));
2533 if (node->local.lto_file_data)
2534 lto_record_renamed_decl (node->local.lto_file_data,
2535 old_name,
2536 IDENTIFIER_POINTER
2537 (DECL_ASSEMBLER_NAME (decl)));
2538 }
2539 else if (TREE_CODE (decl) == VAR_DECL)
2540 {
2541 struct varpool_node *vnode = varpool_get_node (decl);
2542 /* change_decl_assembler_name will warn here on vtables because
2543 C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2544 SET_DECL_ASSEMBLER_NAME (decl,
2545 clone_function_name (decl, "local"));
2546 if (vnode->lto_file_data)
2547 lto_record_renamed_decl (vnode->lto_file_data,
2548 old_name,
2549 IDENTIFIER_POINTER
2550 (DECL_ASSEMBLER_NAME (decl)));
2551 }
2552 }
2553 DECL_SECTION_NAME (decl) = 0;
2554 DECL_COMDAT (decl) = 0;
2555 }
/* Strip every linkage property that would make the symbol global.  */
2556 DECL_COMDAT_GROUP (decl) = 0;
2557 DECL_WEAK (decl) = 0;
2558 DECL_EXTERNAL (decl) = 0;
2559 TREE_PUBLIC (decl) = 0;
2560 if (!DECL_RTL_SET_P (decl))
2561 return;
2562
2563 /* Update rtl flags. */
2564 make_decl_rtl (decl);
2565
2566 rtl = DECL_RTL (decl);
2567 if (!MEM_P (rtl))
2568 return;
2569
2570 symbol = XEXP (rtl, 0);
2571 if (GET_CODE (symbol) != SYMBOL_REF)
2572 return;
2573
/* DECL_WEAK was cleared above, so this clears the weak bit on the
   already-generated SYMBOL_REF as well.  */
2574 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2575 }
2576
2577 /* Call callback on NODE, thunks and aliases associated to NODE.
2578 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2579 skipped. */
/* Returns true as soon as CALLBACK returns true for any visited node;
   false after the whole closure has been walked.  */
2580
2581 bool
2582 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2583 bool (*callback) (struct cgraph_node *, void *),
2584 void *data,
2585 bool include_overwritable)
2586 {
2587 struct cgraph_edge *e;
2588 int i;
2589 struct ipa_ref *ref;
2590
2591 if (callback (node, data))
2592 return true;
/* Thunks appear as callers of their target; recurse into them (and
   transitively into thunks of thunks and their aliases).  */
2593 for (e = node->callers; e; e = e->next_caller)
2594 if (e->caller->thunk.thunk_p
2595 && (include_overwritable
2596 || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2597 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2598 include_overwritable))
2599 return true;
/* Aliases are recorded as IPA_REF_ALIAS references to NODE.  */
2600 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2601 if (ref->use == IPA_REF_ALIAS)
2602 {
2603 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2604 if (include_overwritable
2605 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2606 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2607 include_overwritable))
2608 return true;
2609 }
2610 return false;
2611 }
2612
2613 /* Call callback on NODE and aliases associated to NODE.
2614 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2615 skipped. */
/* Like cgraph_for_node_thunks_and_aliases, but does not descend into
   thunks.  Returns true as soon as CALLBACK returns true.  */
2616
2617 bool
2618 cgraph_for_node_and_aliases (struct cgraph_node *node,
2619 bool (*callback) (struct cgraph_node *, void *),
2620 void *data,
2621 bool include_overwritable)
2622 {
2623 int i;
2624 struct ipa_ref *ref;
2625
2626 if (callback (node, data))
2627 return true;
/* Aliases are recorded as IPA_REF_ALIAS references to NODE; recurse so
   aliases of aliases are covered too.  */
2628 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2629 if (ref->use == IPA_REF_ALIAS)
2630 {
2631 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2632 if (include_overwritable
2633 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2634 if (cgraph_for_node_and_aliases (alias, callback, data,
2635 include_overwritable))
2636 return true;
2637 }
2638 return false;
2639 }
2640
2641 /* Worker to bring NODE local. */
2642
2643 static bool
2644 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2645 {
2646 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2647 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2648 {
2649 cgraph_make_decl_local (node->decl);
2650
2651 node->local.externally_visible = false;
2652 node->local.local = true;
2653 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2654 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2655 }
2656 return false;
2657 }
2658
2659 /* Bring NODE local. */
2660
2661 void
2662 cgraph_make_node_local (struct cgraph_node *node)
2663 {
2664 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2665 NULL, true);
2666 }
2667
2668 /* Worker to set nothrow flag. */
2669
2670 static bool
2671 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2672 {
2673 struct cgraph_edge *e;
2674
2675 TREE_NOTHROW (node->decl) = data != NULL;
2676
2677 if (data != NULL)
2678 for (e = node->callers; e; e = e->next_caller)
2679 e->can_throw_external = false;
2680 return false;
2681 }
2682
2683 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
2684 if any to NOTHROW. */
2685
2686 void
2687 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2688 {
2689 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2690 (void *)(size_t)nothrow, false);
2691 }
2692
2693 /* Worker to set const flag. */
2694
2695 static bool
2696 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2697 {
2698 /* Static constructors and destructors without a side effect can be
2699 optimized out. */
2700 if (data && !((size_t)data & 2))
2701 {
2702 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2703 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2704 if (DECL_STATIC_DESTRUCTOR (node->decl))
2705 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2706 }
2707 TREE_READONLY (node->decl) = data != NULL;
2708 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2709 return false;
2710 }
2711
2712 /* Set TREE_READONLY on NODE's decl and on aliases of NODE
2713 if any to READONLY. */
2714
2715 void
2716 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2717 {
2718 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2719 (void *)(size_t)(readonly + (int)looping * 2),
2720 false);
2721 }
2722
2723 /* Worker to set pure flag. */
2724
2725 static bool
2726 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2727 {
2728 /* Static pureructors and destructors without a side effect can be
2729 optimized out. */
2730 if (data && !((size_t)data & 2))
2731 {
2732 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2733 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2734 if (DECL_STATIC_DESTRUCTOR (node->decl))
2735 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2736 }
2737 DECL_PURE_P (node->decl) = data != NULL;
2738 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2739 return false;
2740 }
2741
2742 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE
2743 if any to PURE. */
2744
2745 void
2746 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2747 {
2748 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2749 (void *)(size_t)(pure + (int)looping * 2),
2750 false);
2751 }
2752
2753 /* Data used by cgraph_propagate_frequency. */
/* NOTE: field order matters — cgraph_propagate_frequency initializes
   this struct with a positional initializer {true, true, true, true}.  */
2754
2755 struct cgraph_propagate_frequency_data
2756 {
/* Still possible that every caller is unlikely executed.  */
2757 bool maybe_unlikely_executed;
/* Still possible that every caller runs at most once (and not in a loop).  */
2758 bool maybe_executed_once;
/* All callers so far are only called at startup.  */
2759 bool only_called_at_startup;
/* All callers so far are only called at exit.  */
2760 bool only_called_at_exit;
2761 };
2762
2763 /* Worker for cgraph_propagate_frequency. */
/* Scans NODE's callers and narrows the flags in DATA (a
   cgraph_propagate_frequency_data).  Returns true when the scan stopped
   early, i.e. some caller remained unexamined because every flag was
   already cleared.  */
2764
2765 static bool
2766 cgraph_propagate_frequency_1 (struct cgraph_node *node, void *data)
2767 {
2768 struct cgraph_propagate_frequency_data *d;
2769 struct cgraph_edge *edge;
2770
2771 d = (struct cgraph_propagate_frequency_data *)data;
/* Stop as soon as all four hypotheses have been refuted.  */
2772 for (edge = node->callers;
2773 edge && (d->maybe_unlikely_executed || d->maybe_executed_once
2774 || d->only_called_at_startup || d->only_called_at_exit);
2775 edge = edge->next_caller)
2776 {
/* Self-recursion tells us nothing about startup/exit placement.  */
2777 if (edge->caller != node)
2778 {
2779 d->only_called_at_startup &= edge->caller->only_called_at_startup;
2780 /* It makes sense to put main() together with the static constructors.
2781 It will be executed for sure, but rest of functions called from
2782 main are definitely not at startup only. */
2783 if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
2784 d->only_called_at_startup = 0;
2785 d->only_called_at_exit &= edge->caller->only_called_at_exit;
2786 }
/* Edges with zero frequency do not contribute to execution counts.  */
2787 if (!edge->frequency)
2788 continue;
2789 switch (edge->caller->frequency)
2790 {
2791 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2792 break;
2793 case NODE_FREQUENCY_EXECUTED_ONCE:
2794 if (dump_file && (dump_flags & TDF_DETAILS))
2795 fprintf (dump_file, " Called by %s that is executed once\n",
2796 cgraph_node_name (edge->caller));
2797 d->maybe_unlikely_executed = false;
/* A call inside a loop of a run-once caller may still repeat.  */
2798 if (inline_edge_summary (edge)->loop_depth)
2799 {
2800 d->maybe_executed_once = false;
2801 if (dump_file && (dump_flags & TDF_DETAILS))
2802 fprintf (dump_file, " Called in loop\n");
2803 }
2804 break;
2805 case NODE_FREQUENCY_HOT:
2806 case NODE_FREQUENCY_NORMAL:
2807 if (dump_file && (dump_flags & TDF_DETAILS))
2808 fprintf (dump_file, " Called by %s that is normal or hot\n",
2809 cgraph_node_name (edge->caller));
2810 d->maybe_unlikely_executed = false;
2811 d->maybe_executed_once = false;
2812 break;
2813 }
2814 }
/* Non-NULL EDGE means the loop exited early with work remaining.  */
2815 return edge != NULL;
2816 }
2817
2818 /* See if the frequency of NODE can be updated based on frequencies of its
2819 callers. */
/* Returns true when any of NODE's frequency/startup/exit properties
   changed.  Only local functions are considered, since for them the set
   of callers is fully known.  */
2820 bool
2821 cgraph_propagate_frequency (struct cgraph_node *node)
2822 {
/* Positional initializer; see cgraph_propagate_frequency_data.  */
2823 struct cgraph_propagate_frequency_data d = {true, true, true, true};
2824 bool changed = false;
2825
2826 if (!node->local.local)
2827 return false;
2828 gcc_assert (node->analyzed);
2829 if (dump_file && (dump_flags & TDF_DETAILS))
2830 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2831
/* Aggregate caller information over NODE and all of its aliases.  */
2832 cgraph_for_node_and_aliases (node, cgraph_propagate_frequency_1, &d, true);
2833
/* A function both at startup and at exit is effectively unconstrained,
   hence the exclusive tests below.  */
2834 if ((d.only_called_at_startup && !d.only_called_at_exit)
2835 && !node->only_called_at_startup)
2836 {
2837 node->only_called_at_startup = true;
2838 if (dump_file)
2839 fprintf (dump_file, "Node %s promoted to only called at startup.\n",
2840 cgraph_node_name (node));
2841 changed = true;
2842 }
2843 if ((d.only_called_at_exit && !d.only_called_at_startup)
2844 && !node->only_called_at_exit)
2845 {
2846 node->only_called_at_exit = true;
2847 if (dump_file)
2848 fprintf (dump_file, "Node %s promoted to only called at exit.\n",
2849 cgraph_node_name (node));
2850 changed = true;
2851 }
2852 /* These come either from profile or user hints; never update them. */
2853 if (node->frequency == NODE_FREQUENCY_HOT
2854 || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2855 return changed;
2856 if (d.maybe_unlikely_executed)
2857 {
2858 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2859 if (dump_file)
2860 fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
2861 cgraph_node_name (node));
2862 changed = true;
2863 }
2864 else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2865 {
2866 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2867 if (dump_file)
2868 fprintf (dump_file, "Node %s promoted to executed once.\n",
2869 cgraph_node_name (node));
2870 changed = true;
2871 }
2872 return changed;
2873 }
2874
2875 /* Return true when NODE can not return or throw and thus
2876 it is safe to ignore its side effects for IPA analysis. */
2877
2878 bool
2879 cgraph_node_cannot_return (struct cgraph_node *node)
2880 {
2881 int flags = flags_from_decl_or_type (node->decl);
2882 if (!flag_exceptions)
2883 return (flags & ECF_NORETURN) != 0;
2884 else
2885 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2886 == (ECF_NORETURN | ECF_NOTHROW));
2887 }
2888
2889 /* Return true when call of E can not lead to return from caller
2890 and thus it is safe to ignore its side effects for IPA analysis
2891 when computing side effects of the caller.
2892 FIXME: We could actually mark all edges that have no reaching
2893 patch to EXIT_BLOCK_PTR or throw to get better results. */
2894 bool
2895 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2896 {
2897 if (cgraph_node_cannot_return (e->caller))
2898 return true;
2899 if (e->indirect_unknown_callee)
2900 {
2901 int flags = e->indirect_info->ecf_flags;
2902 if (!flag_exceptions)
2903 return (flags & ECF_NORETURN) != 0;
2904 else
2905 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2906 == (ECF_NORETURN | ECF_NOTHROW));
2907 }
2908 else
2909 return cgraph_node_cannot_return (e->callee);
2910 }
2911
2912 /* Return true when function NODE can be removed from callgraph
2913 if all direct calls are eliminated. */
2914
2915 bool
2916 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2917 {
2918 gcc_assert (!node->global.inlined_to);
2919 /* Extern inlines can always go, we will use the external definition. */
2920 if (DECL_EXTERNAL (node->decl))
2921 return true;
2922 /* When function is needed, we can not remove it. */
2923 if (node->needed || node->reachable_from_other_partition)
2924 return false;
2925 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2926 || DECL_STATIC_DESTRUCTOR (node->decl))
2927 return false;
2928 /* Only COMDAT functions can be removed if externally visible. */
2929 if (node->local.externally_visible
2930 && (!DECL_COMDAT (node->decl)
2931 || cgraph_used_from_object_file_p (node)))
2932 return false;
2933 return true;
2934 }
2935
2936 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2937
2938 static bool
2939 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2940 {
2941 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2942 }
2943
2944 /* Return true when function NODE and its aliases can be removed from callgraph
2945 if all direct calls are eliminated. */
2946
2947 bool
2948 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2949 {
2950 /* Extern inlines can always go, we will use the external definition. */
2951 if (DECL_EXTERNAL (node->decl))
2952 return true;
2953 if (node->address_taken)
2954 return false;
2955 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2956 }
2957
2958 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2959
2960 static bool
2961 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2962 {
2963 return cgraph_used_from_object_file_p (node);
2964 }
2965
2966 /* Return true when function NODE can be expected to be removed
2967 from program when direct calls in this compilation unit are removed.
2968
2969 As a special case COMDAT functions are
2970 cgraph_can_remove_if_no_direct_calls_p while the are not
2971 cgraph_only_called_directly_p (it is possible they are called from other
2972 unit)
2973
2974 This function behaves as cgraph_only_called_directly_p because eliminating
2975 all uses of COMDAT function does not make it necessarily disappear from
2976 the program unless we are compiling whole program or we do LTO. In this
2977 case we know we win since dynamic linking will not really discard the
2978 linkonce section. */
2979
2980 bool
2981 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2982 {
2983 gcc_assert (!node->global.inlined_to);
2984 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2985 return false;
2986 if (!in_lto_p && !flag_whole_program)
2987 return cgraph_only_called_directly_p (node);
2988 else
2989 {
2990 if (DECL_EXTERNAL (node->decl))
2991 return true;
2992 return cgraph_can_remove_if_no_direct_calls_p (node);
2993 }
2994 }
2995
2996 /* Return true when RESOLUTION indicate that linker will use
2997 the symbol from non-LTO object files. */
2998
2999 bool
3000 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
3001 {
3002 return (resolution == LDPR_PREVAILING_DEF
3003 || resolution == LDPR_PREEMPTED_REG
3004 || resolution == LDPR_RESOLVED_EXEC
3005 || resolution == LDPR_RESOLVED_DYN);
3006 }
3007
3008
3009 /* Return true when NODE is known to be used from other (non-LTO) object file.
3010 Known only when doing LTO via linker plugin. */
3011
3012 bool
3013 cgraph_used_from_object_file_p (struct cgraph_node *node)
3014 {
3015 gcc_assert (!node->global.inlined_to);
3016 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
3017 return false;
3018 if (resolution_used_from_other_file_p (node->resolution))
3019 return true;
3020 return false;
3021 }
3022
3023 /* Worker for cgraph_only_called_directly_p. */
3024
3025 static bool
3026 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3027 {
3028 return !cgraph_only_called_directly_or_aliased_p (node);
3029 }
3030
3031 /* Return true when function NODE and all its aliases are only called
3032 directly.
3033 i.e. it is not externally visible, address was not taken and
3034 it is not used in any other non-standard way. */
3035
3036 bool
3037 cgraph_only_called_directly_p (struct cgraph_node *node)
3038 {
3039 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
3040 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
3041 NULL, true);
3042 }
3043
3044
3045 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3046
3047 static bool
3048 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
3049 {
3050 VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
3051 struct cgraph_edge *cs;
3052 enum availability avail;
3053 cgraph_function_or_thunk_node (node, &avail);
3054
3055 if (avail > AVAIL_OVERWRITABLE)
3056 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3057 if (!cs->indirect_inlining_edge)
3058 VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
3059 return false;
3060 }
3061
3062 /* Collect all callers of NODE and its aliases that are known to lead to NODE
3063 (i.e. are not overwritable). */
3064
3065 VEC (cgraph_edge_p, heap) *
3066 collect_callers_of_node (struct cgraph_node *node)
3067 {
3068 VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
3069 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
3070 &redirect_callers, false);
3071 return redirect_callers;
3072 }
3073
3074 #include "gt-cgraph.h"