/* gcc/cgraph.c -- callgraph handling code.
   This revision accompanies the cgraph.h change adding the
   indirect_calls field to struct cgraph_node.  */
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains basic routines manipulating call graph
23
24 The callgraph:
25
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
28 sharing.
29
30     The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
32
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
40 to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
46 added there.
47
48 Interprocedural information:
49
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
74
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic.h"
98 #include "rtl.h"
99
100 static void cgraph_node_remove_callers (struct cgraph_node *node);
101 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
102 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
103
104 /* Hash table used to convert declarations into nodes. */
105 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
106 /* Hash table used to convert assembler names into nodes. */
107 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
108
109 /* The linked list of cgraph nodes. */
110 struct cgraph_node *cgraph_nodes;
111
112 /* Queue of cgraph nodes scheduled to be lowered. */
113 struct cgraph_node *cgraph_nodes_queue;
114
115 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
116 secondary queue used during optimization to accommodate passes that
117 may generate new functions that need to be optimized and expanded. */
118 struct cgraph_node *cgraph_new_nodes;
119
120 /* Number of nodes in existence. */
121 int cgraph_n_nodes;
122
123 /* Maximal uid used in cgraph nodes. */
124 int cgraph_max_uid;
125
126 /* Maximal uid used in cgraph edges. */
127 int cgraph_edge_max_uid;
128
129 /* Maximal pid used for profiling */
130 int cgraph_max_pid;
131
132 /* Set when whole unit has been analyzed so we can access global info. */
133 bool cgraph_global_info_ready = false;
134
135 /* What state callgraph is in right now. */
136 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
137
138 /* Set when the cgraph is fully built and the basic flags are computed.  */
139 bool cgraph_function_flags_ready = false;
140
141 /* Linked list of cgraph asm nodes. */
142 struct cgraph_asm_node *cgraph_asm_nodes;
143
144 /* Last node in cgraph_asm_nodes. */
145 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
146
147 /* The order index of the next cgraph node to be created. This is
148 used so that we can sort the cgraph nodes in order by when we saw
149 them, to support -fno-toplevel-reorder. */
150 int cgraph_order;
151
/* List of hooks triggered on cgraph_edge events.  Entries are kept in
   registration order.  */
struct cgraph_edge_hook_list {
  cgraph_edge_hook hook;	/* Callback to invoke.  */
  void *data;			/* Opaque user data passed to HOOK.  */
  struct cgraph_edge_hook_list *next;
};
158
/* List of hooks triggered on cgraph_node events.  Entries are kept in
   registration order.  */
struct cgraph_node_hook_list {
  cgraph_node_hook hook;	/* Callback to invoke.  */
  void *data;			/* Opaque user data passed to HOOK.  */
  struct cgraph_node_hook_list *next;
};
165
/* List of hooks triggered on events involving two cgraph_edges (e.g.
   edge duplication).  Entries are kept in registration order.  */
struct cgraph_2edge_hook_list {
  cgraph_2edge_hook hook;	/* Callback to invoke.  */
  void *data;			/* Opaque user data passed to HOOK.  */
  struct cgraph_2edge_hook_list *next;
};
172
/* List of hooks triggered on events involving two cgraph_nodes (e.g.
   node duplication).  Entries are kept in registration order.  */
struct cgraph_2node_hook_list {
  cgraph_2node_hook hook;	/* Callback to invoke.  */
  void *data;			/* Opaque user data passed to HOOK.  */
  struct cgraph_2node_hook_list *next;
};
179
180 /* List of hooks triggered when an edge is removed. */
181 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
182 /* List of hooks triggered when a node is removed. */
183 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
184 /* List of hooks triggered when an edge is duplicated. */
185 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
186 /* List of hooks triggered when a node is duplicated. */
187 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
188 /* List of hooks triggered when an function is inserted. */
189 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
190
191 /* Head of a linked list of unused (freed) call graph nodes.
192    Do not GTY((delete)) this list so UIDs get reliably recycled.  */
193 static GTY(()) struct cgraph_node *free_nodes;
194 /* Head of a linked list of unused (freed) call graph edges.
195    Do not GTY((delete)) this list so UIDs get reliably recycled.  */
196 static GTY(()) struct cgraph_edge *free_edges;
197
198 /* Macros to access the next item in the list of free cgraph nodes and
199 edges. */
200 #define NEXT_FREE_NODE(NODE) (NODE)->next
201 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
202
203 /* Register HOOK to be called with DATA on each removed edge. */
204 struct cgraph_edge_hook_list *
205 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
206 {
207 struct cgraph_edge_hook_list *entry;
208 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
209
210 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
211 entry->hook = hook;
212 entry->data = data;
213 entry->next = NULL;
214 while (*ptr)
215 ptr = &(*ptr)->next;
216 *ptr = entry;
217 return entry;
218 }
219
220 /* Remove ENTRY from the list of hooks called on removing edges. */
221 void
222 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
223 {
224 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
225
226 while (*ptr != entry)
227 ptr = &(*ptr)->next;
228 *ptr = entry->next;
229 free (entry);
230 }
231
232 /* Call all edge removal hooks. */
233 static void
234 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
235 {
236 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
237 while (entry)
238 {
239 entry->hook (e, entry->data);
240 entry = entry->next;
241 }
242 }
243
244 /* Register HOOK to be called with DATA on each removed node. */
245 struct cgraph_node_hook_list *
246 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
247 {
248 struct cgraph_node_hook_list *entry;
249 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
250
251 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
252 entry->hook = hook;
253 entry->data = data;
254 entry->next = NULL;
255 while (*ptr)
256 ptr = &(*ptr)->next;
257 *ptr = entry;
258 return entry;
259 }
260
261 /* Remove ENTRY from the list of hooks called on removing nodes. */
262 void
263 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
264 {
265 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
266
267 while (*ptr != entry)
268 ptr = &(*ptr)->next;
269 *ptr = entry->next;
270 free (entry);
271 }
272
273 /* Call all node removal hooks. */
274 static void
275 cgraph_call_node_removal_hooks (struct cgraph_node *node)
276 {
277 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
278 while (entry)
279 {
280 entry->hook (node, entry->data);
281 entry = entry->next;
282 }
283 }
284
285 /* Register HOOK to be called with DATA on each inserted node. */
286 struct cgraph_node_hook_list *
287 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
288 {
289 struct cgraph_node_hook_list *entry;
290 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
291
292 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
293 entry->hook = hook;
294 entry->data = data;
295 entry->next = NULL;
296 while (*ptr)
297 ptr = &(*ptr)->next;
298 *ptr = entry;
299 return entry;
300 }
301
302 /* Remove ENTRY from the list of hooks called on inserted nodes. */
303 void
304 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
305 {
306 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
307
308 while (*ptr != entry)
309 ptr = &(*ptr)->next;
310 *ptr = entry->next;
311 free (entry);
312 }
313
314 /* Call all node insertion hooks. */
315 void
316 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
317 {
318 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
319 while (entry)
320 {
321 entry->hook (node, entry->data);
322 entry = entry->next;
323 }
324 }
325
326 /* Register HOOK to be called with DATA on each duplicated edge. */
327 struct cgraph_2edge_hook_list *
328 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
329 {
330 struct cgraph_2edge_hook_list *entry;
331 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
332
333 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
334 entry->hook = hook;
335 entry->data = data;
336 entry->next = NULL;
337 while (*ptr)
338 ptr = &(*ptr)->next;
339 *ptr = entry;
340 return entry;
341 }
342
343 /* Remove ENTRY from the list of hooks called on duplicating edges. */
344 void
345 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
346 {
347 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
348
349 while (*ptr != entry)
350 ptr = &(*ptr)->next;
351 *ptr = entry->next;
352 free (entry);
353 }
354
355 /* Call all edge duplication hooks. */
356 static void
357 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
358 struct cgraph_edge *cs2)
359 {
360 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
361 while (entry)
362 {
363 entry->hook (cs1, cs2, entry->data);
364 entry = entry->next;
365 }
366 }
367
368 /* Register HOOK to be called with DATA on each duplicated node. */
369 struct cgraph_2node_hook_list *
370 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
371 {
372 struct cgraph_2node_hook_list *entry;
373 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
374
375 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
376 entry->hook = hook;
377 entry->data = data;
378 entry->next = NULL;
379 while (*ptr)
380 ptr = &(*ptr)->next;
381 *ptr = entry;
382 return entry;
383 }
384
385 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
386 void
387 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
388 {
389 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
390
391 while (*ptr != entry)
392 ptr = &(*ptr)->next;
393 *ptr = entry->next;
394 free (entry);
395 }
396
397 /* Call all node duplication hooks. */
398 static void
399 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
400 struct cgraph_node *node2)
401 {
402 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
403 while (entry)
404 {
405 entry->hook (node1, node2, entry->data);
406 entry = entry->next;
407 }
408 }
409
/* Returns a hash code for P.  P is really a cgraph_node; the hash is the
   DECL_UID of its declaration, which is unique per decl.  Used as the
   hash callback of CGRAPH_HASH.  */

static hashval_t
hash_node (const void *p)
{
  const struct cgraph_node *n = (const struct cgraph_node *) p;
  return (hashval_t) DECL_UID (n->decl);
}
418
419
/* Returns nonzero if P1 and P2 are equal.  Both are cgraph_nodes; they
   compare equal when their declarations have the same DECL_UID.  Used as
   the equality callback of CGRAPH_HASH.  */

static int
eq_node (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
  const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
  return DECL_UID (n1->decl) == DECL_UID (n2->decl);
}
429
/* Allocate new callgraph node.  A node is taken from the free list when
   one is available (its uid is preserved, keeping uids dense); otherwise
   a zero-initialized node is GC-allocated and assigned the next unused
   uid.  */

static inline struct cgraph_node *
cgraph_allocate_node (void)
{
  struct cgraph_node *node;

  if (free_nodes)
    {
      /* Recycle a previously freed node together with its uid.  */
      node = free_nodes;
      free_nodes = NEXT_FREE_NODE (node);
    }
  else
    {
      node = GGC_CNEW (struct cgraph_node);
      node->uid = cgraph_max_uid++;
    }

  return node;
}
450
/* Allocate new callgraph node and insert it into basic data structures:
   push it at the head of the doubly-linked CGRAPH_NODES list and give it
   the next order number.  The node is not yet bound to a declaration nor
   entered into CGRAPH_HASH -- the caller does that.  */

static struct cgraph_node *
cgraph_create_node (void)
{
  struct cgraph_node *node = cgraph_allocate_node ();

  node->next = cgraph_nodes;
  node->pid = -1;	/* No profiling pid assigned yet.  */
  node->order = cgraph_order++;
  if (cgraph_nodes)
    cgraph_nodes->previous = node;
  node->previous = NULL;
  node->global.estimated_growth = INT_MIN;
  node->frequency = NODE_FREQUENCY_NORMAL;
  cgraph_nodes = node;
  cgraph_n_nodes++;
  return node;
}
470
/* Return cgraph node assigned to DECL.  Create new one when needed.
   DECL must be a FUNCTION_DECL.  Lookup is by DECL_UID via CGRAPH_HASH
   (created lazily here).  A same-body alias is transparently resolved to
   the node of the function it aliases.  */

struct cgraph_node *
cgraph_node (tree decl)
{
  struct cgraph_node key, *node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);

  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  if (*slot)
    {
      node = *slot;
      /* Redirect same-body aliases to the node of the real function.  */
      if (node->same_body_alias)
	node = node->same_body;
      return node;
    }

  node = cgraph_create_node ();
  node->decl = decl;
  *slot = node;
  /* A nested function is linked into its containing function's node via
     the origin/nested lists.  */
  if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
    {
      node->origin = cgraph_node (DECL_CONTEXT (decl));
      node->next_nested = node->origin->nested;
      node->origin->nested = node;
    }
  /* If the assembler-name index already exists, keep it in sync.  */
  if (assembler_name_hash)
    {
      void **aslot;
      tree name = DECL_ASSEMBLER_NAME (decl);

      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
					INSERT);
      /* We can have multiple declarations with same assembler name.  For C++
	 it is __builtin_strlen and strlen, for instance.  Do we need to
	 record them all?  Original implementation marked just first one
	 so lets hope for the best.  */
      if (*aslot == NULL)
	*aslot = node;
    }
  return node;
}
521
/* Mark ALIAS as an alias to DECL.  Creates a fresh alias node, pushes it
   at the head of DECL's same_body list and enters it into CGRAPH_HASH.
   Returns the new alias node, or NULL when a cgraph node for ALIAS
   already exists.  NOTE(review): assumes CGRAPH_HASH exists -- the
   cgraph_node (decl) call above guarantees that.  */

static struct cgraph_node *
cgraph_same_body_alias_1 (tree alias, tree decl)
{
  struct cgraph_node key, *alias_node, *decl_node, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
  decl_node = cgraph_node (decl);

  key.decl = alias;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);

  /* If the cgraph_node has been already created, fail.  */
  if (*slot)
    return NULL;

  alias_node = cgraph_allocate_node ();
  alias_node->decl = alias;
  alias_node->same_body_alias = 1;
  alias_node->same_body = decl_node;
  alias_node->previous = NULL;
  /* Push the alias at the head of DECL's same_body list.  */
  if (decl_node->same_body)
    decl_node->same_body->previous = alias_node;
  alias_node->next = decl_node->same_body;
  alias_node->thunk.alias = decl;
  decl_node->same_body = alias_node;
  *slot = alias_node;
  return alias_node;
}
554
/* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
   Same body aliases are output whenever the body of DECL is output,
   and cgraph_node (ALIAS) transparently returns cgraph_node (DECL).  */

bool
cgraph_same_body_alias (tree alias, tree decl)
{
#ifndef ASM_OUTPUT_DEF
  /* If aliases aren't supported by the assembler, fail.  The remainder
     of the function is unreachable in that configuration.  */
  return false;
#endif

  /*gcc_assert (!assembler_name_hash);*/

  return cgraph_same_body_alias_1 (alias, decl) != NULL;
}
571
/* Record ALIAS as a thunk of DECL.  THIS_ADJUSTING, FIXED_OFFSET,
   VIRTUAL_VALUE and VIRTUAL_OFFSET describe the adjustment the thunk
   applies; REAL_ALIAS is the function the thunk ultimately refers to.
   Any previously finalized node for ALIAS is removed first and replaced
   by a same-body alias node carrying the thunk data.  */
void
cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
		  HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
		  tree virtual_offset,
		  tree real_alias)
{
  struct cgraph_node *node = cgraph_get_node (alias);

  if (node)
    {
      /* Only a finalized, alias-free node may be replaced.  */
      gcc_assert (node->local.finalized);
      gcc_assert (!node->same_body);
      cgraph_remove_node (node);
    }

  node = cgraph_same_body_alias_1 (alias, decl);
  gcc_assert (node);
#ifdef ENABLE_CHECKING
  /* VIRTUAL_OFFSET, when given, must agree with VIRTUAL_VALUE.  */
  gcc_assert (!virtual_offset
	      || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
#endif
  node->thunk.fixed_offset = fixed_offset;
  node->thunk.this_adjusting = this_adjusting;
  node->thunk.virtual_value = virtual_value;
  node->thunk.virtual_offset_p = virtual_offset != NULL;
  node->thunk.alias = real_alias;
  node->thunk.thunk_p = true;
}
600
/* Returns the cgraph node assigned to DECL or NULL if no cgraph node
   is assigned.  Unlike cgraph_node, never creates a node.  A same-body
   alias is resolved to the node of the function it aliases.  */

struct cgraph_node *
cgraph_get_node (tree decl)
{
  struct cgraph_node key, *node = NULL, **slot;

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);

  if (!cgraph_hash)
    return NULL;

  key.decl = decl;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
						 NO_INSERT);

  if (slot && *slot)
    {
      node = *slot;
      if (node->same_body_alias)
	node = node->same_body;
    }
  return node;
}
627
/* Insert already constructed node into hashtable.  Asserts that no node
   for the same decl is present.  NOTE(review): assumes CGRAPH_HASH has
   already been created by an earlier cgraph_node call -- confirm for any
   new caller.  */

void
cgraph_insert_node_to_hashtable (struct cgraph_node *node)
{
  struct cgraph_node **slot;

  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);

  gcc_assert (!*slot);
  *slot = node;
}
640
/* Returns a hash code for P.  P is really a cgraph_node, hashed by the
   assembler name of its declaration.  Hash callback of
   ASSEMBLER_NAME_HASH.  */

static hashval_t
hash_node_by_assembler_name (const void *p)
{
  const struct cgraph_node *n = (const struct cgraph_node *) p;
  return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
}
649
/* Returns nonzero if P1 and P2 are equal.  P1 is a cgraph_node, P2 an
   assembler-name tree; they match when P1's decl has that assembler
   name.  Equality callback of ASSEMBLER_NAME_HASH.  */

static int
eq_assembler_name (const void *p1, const void *p2)
{
  const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
  const_tree name = (const_tree)p2;
  return (decl_assembler_name_equal (n1->decl, name));
}
659
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
   Return NULL if there's no such node.  The assembler-name index is
   built lazily on first use from all non-inlined nodes and their
   same-body aliases; cgraph_node keeps it in sync afterwards.  */

struct cgraph_node *
cgraph_node_for_asm (tree asmname)
{
  struct cgraph_node *node;
  void **slot;

  if (!assembler_name_hash)
    {
      assembler_name_hash =
	htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
			 NULL);
      for (node = cgraph_nodes; node; node = node->next)
	if (!node->global.inlined_to)
	  {
	    tree name = DECL_ASSEMBLER_NAME (node->decl);
	    slot = htab_find_slot_with_hash (assembler_name_hash, name,
					     decl_assembler_name_hash (name),
					     INSERT);
	    /* We can have multiple declarations with same assembler name. For C++
	       it is __builtin_strlen and strlen, for instance.  Do we need to
	       record them all?  Original implementation marked just first one
	       so lets hope for the best.  */
	    if (!*slot)
	      *slot = node;
	    /* Index the node's same-body aliases under their own names.  */
	    if (node->same_body)
	      {
		struct cgraph_node *alias;

		for (alias = node->same_body; alias; alias = alias->next)
		  {
		    hashval_t hash;
		    name = DECL_ASSEMBLER_NAME (alias->decl);
		    hash = decl_assembler_name_hash (name);
		    slot = htab_find_slot_with_hash (assembler_name_hash, name,
						     hash, INSERT);
		    if (!*slot)
		      *slot = alias;
		  }
	      }
	  }
    }

  slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
				   decl_assembler_name_hash (asmname),
				   NO_INSERT);

  if (slot)
    {
      node = (struct cgraph_node *) *slot;
      /* Resolve same-body aliases to the real function's node.  */
      if (node->same_body_alias)
	node = node->same_body;
      return node;
    }
  return NULL;
}
718
/* Returns a hash value for X, which really is a cgraph_edge: the hash of
   the pointer to its call statement.  Hash callback of the per-node
   CALL_SITE_HASH.  */

static hashval_t
edge_hash (const void *x)
{
  return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
}
726
/* Return nonzero if the call statement of cgraph_edge X is the gimple
   statement Y.  Equality callback of the per-node CALL_SITE_HASH.  */

static int
edge_eq (const void *x, const void *y)
{
  return ((const struct cgraph_edge *) x)->call_stmt == y;
}
734
/* Add call graph edge E to call site hash of its caller.  Asserts that
   no other edge for the same call statement is recorded yet.  */

static inline void
cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
{
  void **slot;
  slot = htab_find_slot_with_hash (e->caller->call_site_hash,
				   e->call_stmt,
				   htab_hash_pointer (e->call_stmt),
				   INSERT);
  gcc_assert (!*slot);
  *slot = e;
}
748
/* Return the callgraph edge representing the GIMPLE_CALL statement
   CALL_STMT in NODE, or NULL if there is none.  Searches both the
   direct callees and the indirect_calls lists; once more than 100 edges
   have been scanned, a call-site hash is built so later lookups are
   O(1).  */

struct cgraph_edge *
cgraph_edge (struct cgraph_node *node, gimple call_stmt)
{
  struct cgraph_edge *e, *e2;
  int n = 0;

  if (node->call_site_hash)
    return (struct cgraph_edge *)
      htab_find_with_hash (node->call_site_hash, call_stmt,
			   htab_hash_pointer (call_stmt));

  /* This loop may turn out to be performance problem.  In such case adding
     hashtables into call nodes with very many edges is probably best
     solution.  It is not good idea to add pointer into CALL_EXPR itself
     because we want to make possible having multiple cgraph nodes representing
     different clones of the same body before the body is actually cloned.  */
  for (e = node->callees; e; e = e->next_callee)
    {
      if (e->call_stmt == call_stmt)
	break;
      n++;
    }

  if (!e)
    for (e = node->indirect_calls; e; e = e->next_callee)
      {
	if (e->call_stmt == call_stmt)
	  break;
	n++;
      }

  /* Too many edges scanned linearly: build the hash for future lookups,
     covering direct and indirect edges alike.  */
  if (n > 100)
    {
      node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
      for (e2 = node->callees; e2; e2 = e2->next_callee)
	cgraph_add_edge_to_call_site_hash (e2);
      for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
	cgraph_add_edge_to_call_site_hash (e2);
    }

  return e;
}
794
795
/* Change field call_stmt of edge E to NEW_STMT.  Updates the caller's
   call-site hash, recomputes the external-throw flag, and -- when an
   indirect edge now has a known fndecl -- turns E into a direct edge.  */

void
cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
{
  tree decl;

  /* Unregister the old statement before rewriting it.  */
  if (e->caller->call_site_hash)
    {
      htab_remove_elt_with_hash (e->caller->call_site_hash,
				 e->call_stmt,
				 htab_hash_pointer (e->call_stmt));
    }

  e->call_stmt = new_stmt;
  if (e->indirect_unknown_callee
      && (decl = gimple_call_fndecl (new_stmt)))
    {
      /* Constant propagation (and possibly also inlining?) can turn an
	 indirect call into a direct one.  */
      struct cgraph_node *new_callee = cgraph_node (decl);

      cgraph_make_edge_direct (e, new_callee);
    }

  /* stmt_can_throw_external needs the caller's cfun to be current.  */
  push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
  e->can_throw_external = stmt_can_throw_external (new_stmt);
  pop_cfun ();
  if (e->caller->call_site_hash)
    cgraph_add_edge_to_call_site_hash (e);
}
827
/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body: replace OLD_STMT with NEW_STMT
   in ORIG and in every clone that has an edge for OLD_STMT.  The walk is
   a pre-order traversal of the clones/next_sibling_clone tree rooted at
   ORIG.  */

void
cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
				       gimple old_stmt, gimple new_stmt)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);

  if (edge)
    cgraph_set_call_stmt (edge, new_stmt);

  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
	if (edge)
	  cgraph_set_call_stmt (edge, new_stmt);
	/* Descend into children first, then siblings, then climb back up
	   to the next unvisited sibling of an ancestor.  */
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
861
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
   same function body.  If clones already have edge for OLD_STMT; only
   update the edge same way as cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
   frequencies of the clones.  */

void
cgraph_create_edge_including_clones (struct cgraph_node *orig,
				     struct cgraph_node *callee,
				     gimple old_stmt,
				     gimple stmt, gcov_type count,
				     int freq, int loop_depth,
				     cgraph_inline_failed_t reason)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  if (!cgraph_edge (orig, stmt))
    {
      edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
      edge->inline_failed = reason;
    }

  /* Pre-order walk of the clone tree rooted at ORIG.  */
  node = orig->clones;
  if (node)
    while (node != orig)
      {
	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);

	/* It is possible that clones already contain the edge while
	   master didn't.  Either we promoted indirect call into direct
	   call in the clone or we are processing clones of unreachable
	   master where edges has been removed.  */
	if (edge)
	  cgraph_set_call_stmt (edge, stmt);
	else if (!cgraph_edge (node, stmt))
	  {
	    edge = cgraph_create_edge (node, callee, stmt, count,
				       freq, loop_depth);
	    edge->inline_failed = reason;
	  }

	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
918
/* Give initial reasons why inlining would fail on EDGE.  This gets either
   nullified or usually overwritten by more precise reasons later.  The
   checks are ordered from the hardest obstacle to the softest default.  */

static void
initialize_inline_failed (struct cgraph_edge *e)
{
  struct cgraph_node *callee = e->callee;

  if (e->indirect_unknown_callee)
    e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
  else if (!callee->analyzed)
    e->inline_failed = CIF_BODY_NOT_AVAILABLE;
  else if (callee->local.redefined_extern_inline)
    e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
  else if (!callee->local.inlinable)
    e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
  else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
    e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
  else
    e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
}
940
/* Allocate a cgraph_edge structure and fill it with data according to the
   parameters of which only CALLEE can be NULL (when creating an indirect call
   edge).  CALL_STMT may be NULL under LTO; COUNT/FREQ/NEST carry the
   profile count, call frequency and loop nesting depth.  The edge is NOT
   linked into the caller/callee lists -- the callers of this helper do
   that.  */

static struct cgraph_edge *
cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
		      gimple call_stmt, gcov_type count, int freq, int nest)
{
  struct cgraph_edge *edge;

  /* LTO does not actually have access to the call_stmt since these
     have not been loaded yet.  */
  if (call_stmt)
    {
#ifdef ENABLE_CHECKING
      /* This is rather pricey check possibly triggering construction of
	 call stmt hashtable.  */
      gcc_assert (!cgraph_edge (caller, call_stmt));
#endif

      gcc_assert (is_gimple_call (call_stmt));
    }

  /* Reuse a freed edge (preserving its uid) when one is available.  */
  if (free_edges)
    {
      edge = free_edges;
      free_edges = NEXT_FREE_EDGE (edge);
    }
  else
    {
      edge = GGC_NEW (struct cgraph_edge);
      edge->uid = cgraph_edge_max_uid++;
    }

  edge->aux = NULL;
  edge->caller = caller;
  edge->callee = callee;
  edge->prev_caller = NULL;
  edge->next_caller = NULL;
  edge->prev_callee = NULL;
  edge->next_callee = NULL;

  edge->count = count;
  gcc_assert (count >= 0);
  edge->frequency = freq;
  gcc_assert (freq >= 0);
  gcc_assert (freq <= CGRAPH_FREQ_MAX);
  edge->loop_nest = nest;

  edge->call_stmt = call_stmt;
  /* stmt_can_throw_external needs the caller's cfun to be current.  */
  push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
  edge->can_throw_external
    = call_stmt ? stmt_can_throw_external (call_stmt) : false;
  pop_cfun ();
  edge->call_stmt_cannot_inline_p =
    (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
  if (call_stmt && caller->call_site_hash)
    cgraph_add_edge_to_call_site_hash (edge);

  edge->indirect_info = NULL;
  edge->indirect_inlining_edge = 0;

  return edge;
}
1005
/* Create edge from CALLER to CALLEE in the cgraph.  The new edge is
   pushed at the head of both the caller's callees list and the callee's
   callers list, and its initial inline-failed reason is computed.  */

struct cgraph_edge *
cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
		    gimple call_stmt, gcov_type count, int freq, int nest)
{
  struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
						   count, freq, nest);

  edge->indirect_unknown_callee = 0;
  initialize_inline_failed (edge);

  edge->next_caller = callee->callers;
  if (callee->callers)
    callee->callers->prev_caller = edge;
  edge->next_callee = caller->callees;
  if (caller->callees)
    caller->callees->prev_callee = edge;
  caller->callees = edge;
  callee->callers = edge;

  return edge;
}
1029
1030
1031 /* Create an indirect edge with a yet-undetermined callee where the call
1032 statement destination is a formal parameter of the caller with index
1033 PARAM_INDEX. */
1034
1035 struct cgraph_edge *
1036 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1037 gcov_type count, int freq, int nest)
1038 {
1039 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1040 count, freq, nest);
1041
1042 edge->indirect_unknown_callee = 1;
1043 initialize_inline_failed (edge);
1044
1045 edge->indirect_info = GGC_NEW (struct cgraph_indirect_call_info);
1046 edge->indirect_info->param_index = -1;
1047
1048 edge->next_callee = caller->indirect_calls;
1049 if (caller->indirect_calls)
1050 caller->indirect_calls->prev_callee = edge;
1051 caller->indirect_calls = edge;
1052
1053 return edge;
1054 }
1055
/* Remove the edge E from the list of the callers of the callee.  Only the
   callee side is unlinked; the caller side is left untouched.  */

static inline void
cgraph_edge_remove_callee (struct cgraph_edge *e)
{
  /* Indirect edges have no callee and hence no callers-list entry.  */
  gcc_assert (!e->indirect_unknown_callee);
  if (e->prev_caller)
    e->prev_caller->next_caller = e->next_caller;
  if (e->next_caller)
    e->next_caller->prev_caller = e->prev_caller;
  if (!e->prev_caller)
    e->callee->callers = e->next_caller;
}
1069
/* Remove the edge E from the list of the callees of the caller, and drop
   it from the caller's call site hash, if any.  */

static inline void
cgraph_edge_remove_caller (struct cgraph_edge *e)
{
  if (e->prev_callee)
    e->prev_callee->next_callee = e->next_callee;
  if (e->next_callee)
    e->next_callee->prev_callee = e->prev_callee;
  if (!e->prev_callee)
    {
      /* E was the list head; indirect edges hang off a separate list.  */
      if (e->indirect_unknown_callee)
	e->caller->indirect_calls = e->next_callee;
      else
	e->caller->callees = e->next_callee;
    }
  if (e->caller->call_site_hash)
    htab_remove_elt_with_hash (e->caller->call_site_hash,
			       e->call_stmt,
			       htab_hash_pointer (e->call_stmt));
}
1091
/* Put the edge onto the free list.  The uid is preserved across the memset
   so that recycled edges keep a stable identifier.  */

static void
cgraph_free_edge (struct cgraph_edge *e)
{
  int uid = e->uid;

  /* Clear out the edge so we do not dangle pointers.  */
  memset (e, 0, sizeof (*e));
  e->uid = uid;
  NEXT_FREE_EDGE (e) = free_edges;
  free_edges = e;
}
1105
/* Remove the edge E in the cgraph: run the removal hooks, unlink it from
   both endpoint lists and recycle its memory.  */

void
cgraph_remove_edge (struct cgraph_edge *e)
{
  /* Call all edge removal hooks.  */
  cgraph_call_edge_removal_hooks (e);

  if (!e->indirect_unknown_callee)
    /* Remove from callers list of the callee.  */
    cgraph_edge_remove_callee (e);

  /* Remove from callees list of the callers.  */
  cgraph_edge_remove_caller (e);

  /* Put the edge onto the free list.  */
  cgraph_free_edge (e);
}
1124
/* Set callee of call graph edge E to N and push E at the head of N's list
   of callers.  E must not currently be linked into any callers list.  */

static void
cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  e->prev_caller = NULL;
  if (n->callers)
    n->callers->prev_caller = e;
  e->next_caller = n->callers;
  n->callers = e;
  e->callee = n;
}
1138
/* Redirect callee of E to N.  The function does not update underlying
   call expression; the caller is responsible for rewriting the gimple
   statement if needed.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Remove from callers list of the current callee.  */
  cgraph_edge_remove_callee (e);

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (e, n);
}
1151
/* Make an indirect EDGE with an unknown callee an ordinary edge leading to
   CALLEE.  The edge is moved from the caller's indirect_calls list onto
   its callees list and hooked into CALLEE's callers list.  */

void
cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
{
  edge->indirect_unknown_callee = 0;

  /* Get the edge out of the indirect edge list.  */
  if (edge->prev_callee)
    edge->prev_callee->next_callee = edge->next_callee;
  if (edge->next_callee)
    edge->next_callee->prev_callee = edge->prev_callee;
  if (!edge->prev_callee)
    edge->caller->indirect_calls = edge->next_callee;

  /* Put it into the normal callee list.  */
  edge->prev_callee = NULL;
  edge->next_callee = edge->caller->callees;
  if (edge->caller->callees)
    edge->caller->callees->prev_callee = edge;
  edge->caller->callees = edge;

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (edge, callee);

  /* We need to re-determine the inlining status of the edge.  */
  initialize_inline_failed (edge);
}
1181
1182
/* Update or remove the corresponding cgraph edge of NODE if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
   of OLD_STMT if it was previously a call statement.  */

static void
cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
					gimple old_stmt, tree old_call, gimple new_stmt)
{
  tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;

  /* If neither statement has a known callee, there is nothing to update.  */
  if (!new_call && !old_call)
    return;
  /* See if we turned an indirect call into a direct call, or folded a call
     to one builtin into a different builtin.  */
  if (old_call != new_call)
    {
      struct cgraph_edge *e = cgraph_edge (node, old_stmt);
      struct cgraph_edge *ne = NULL;
      gcov_type count;
      int frequency;
      int loop_nest;

      if (e)
	{
	  /* See if the edge is already there and has the correct callee.  It
	     might be so because of indirect inlining has already updated
	     it.  */
	  if (new_call && e->callee && e->callee->decl == new_call)
	    return;

	  /* Otherwise remove edge and create new one; we can't simply redirect
	     since function has changed, so inline plan and other information
	     attached to edge is invalid.  */
	  count = e->count;
	  frequency = e->frequency;
	  loop_nest = e->loop_nest;
	  cgraph_remove_edge (e);
	}
      else
	{
	  /* We are seeing new direct call; compute profile info based on BB.  */
	  basic_block bb = gimple_bb (new_stmt);
	  count = bb->count;
	  frequency = compute_call_stmt_bb_frequency (current_function_decl,
						      bb);
	  loop_nest = bb->loop_depth;
	}

      if (new_call)
	{
	  ne = cgraph_create_edge (node, cgraph_node (new_call),
				   new_stmt, count, frequency,
				   loop_nest);
	  gcc_assert (ne->inline_failed);
	}
    }
  /* We only updated the call stmt; update pointer in cgraph edge.  */
  else if (old_stmt != new_stmt)
    cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
}
1244
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
   OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
   of OLD_STMT before it was updated (updating can happen inplace).
   The update is also applied to every clone of the current node.  */

void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
  struct cgraph_node *orig = cgraph_node (cfun->decl);
  struct cgraph_node *node;

  cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
  if (orig->clones)
    /* Depth-first walk of the clone tree rooted at ORIG, using the
       clones / next_sibling_clone / clone_of links.  */
    for (node = orig->clones; node != orig;)
      {
	cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    /* Climb back up until an unvisited sibling subtree remains, or
	       we arrive back at the root.  */
	    while (node != orig && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != orig)
	      node = node->next_sibling_clone;
	  }
      }
}
1273
1274
/* Remove all callees from the node.  Also disposes of the node's call site
   hash, which only describes outgoing calls.  */

void
cgraph_node_remove_callees (struct cgraph_node *node)
{
  struct cgraph_edge *e, *f;

  /* It is sufficient to remove the edges from the lists of callers of
     the callees.  The callee list of the node can be zapped with one
     assignment.  */
  for (e = node->callees; e; e = f)
    {
      /* Save the successor first; freeing E clears its pointers.  */
      f = e->next_callee;
      cgraph_call_edge_removal_hooks (e);
      if (!e->indirect_unknown_callee)
	cgraph_edge_remove_callee (e);
      cgraph_free_edge (e);
    }
  node->callees = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
}
1300
/* Remove all callers from the node.  */

static void
cgraph_node_remove_callers (struct cgraph_node *node)
{
  struct cgraph_edge *e, *f;

  /* It is sufficient to remove the edges from the lists of callees of
     the callers.  The caller list of the node can be zapped with one
     assignment.  */
  for (e = node->callers; e; e = f)
    {
      /* Save the successor first; freeing E clears its pointers.  */
      f = e->next_caller;
      cgraph_call_edge_removal_hooks (e);
      cgraph_edge_remove_caller (e);
      cgraph_free_edge (e);
    }
  node->callers = NULL;
}
1320
/* Release memory used to represent body of function NODE: the SSA/CFG
   annotations, the gimple body, and the struct function itself.  */

void
cgraph_release_function_body (struct cgraph_node *node)
{
  if (DECL_STRUCT_FUNCTION (node->decl))
    {
      tree old_decl = current_function_decl;
      push_cfun (DECL_STRUCT_FUNCTION (node->decl));
      if (cfun->gimple_df)
	{
	  current_function_decl = node->decl;
	  delete_tree_ssa ();
	  delete_tree_cfg_annotations ();
	  cfun->eh = NULL;
	  current_function_decl = old_decl;
	}
      if (cfun->cfg)
	{
	  /* Dominance info must already have been freed.  */
	  gcc_assert (dom_computed[0] == DOM_NONE);
	  gcc_assert (dom_computed[1] == DOM_NONE);
	  clear_edges ();
	}
      if (cfun->value_histograms)
	free_histograms ();
      gcc_assert (!current_loops);
      pop_cfun();
      gimple_set_body (node->decl, NULL);
      VEC_free (ipa_opt_pass, heap,
		node->ipa_transforms_to_apply);
      /* Struct function hangs a lot of data that would leak if we didn't
	 remove all pointers to it.  */
      ggc_free (DECL_STRUCT_FUNCTION (node->decl));
      DECL_STRUCT_FUNCTION (node->decl) = NULL;
    }
  DECL_SAVED_TREE (node->decl) = NULL;
  /* If the node is abstract and needed, then do not clear DECL_INITIAL
     of its associated function declaration because it's
     needed to emit debug info later.  */
  if (!node->abstract_and_needed)
    DECL_INITIAL (node->decl) = error_mark_node;
}
1363
/* Remove same body alias NODE: unlink it from its main node's same_body
   chain and from the cgraph hashtables, then recycle its memory (only the
   uid survives for reuse).  */

void
cgraph_remove_same_body_alias (struct cgraph_node *node)
{
  void **slot;
  int uid = node->uid;

  gcc_assert (node->same_body_alias);
  /* Unlink from the same_body chain of the main node.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    node->same_body->same_body = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    htab_clear_slot (cgraph_hash, slot);
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1401
/* Remove the node from cgraph and recycle its memory.  All edges, nested
   function links, clone-tree links, same-body aliases and comdat group
   membership are updated so that nothing references NODE afterwards.  */

void
cgraph_remove_node (struct cgraph_node *node)
{
  void **slot;
  bool kill_body = false;
  struct cgraph_node *n;
  int uid = node->uid;

  cgraph_call_node_removal_hooks (node);
  cgraph_node_remove_callers (node);
  cgraph_node_remove_callees (node);
  VEC_free (ipa_opt_pass, heap,
	    node->ipa_transforms_to_apply);

  /* Incremental inlining accesses removed nodes stored in the postorder
     list, so clear the flags rather than leaving them stale.  */
  node->needed = node->reachable = false;
  /* Detach nested functions and unlink NODE from its parent's nested
     list, if any.  */
  for (n = node->nested; n; n = n->next_nested)
    n->origin = NULL;
  node->nested = NULL;
  if (node->origin)
    {
      struct cgraph_node **node2 = &node->origin->nested;

      while (*node2 != node)
	node2 = &(*node2)->next_nested;
      *node2 = node->next_nested;
    }
  /* Unlink NODE from the chain of all cgraph nodes.  */
  if (node->previous)
    node->previous->next = node->next;
  else
    cgraph_nodes = node->next;
  if (node->next)
    node->next->previous = node->previous;
  node->next = NULL;
  node->previous = NULL;
  slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
  if (*slot == node)
    {
      struct cgraph_node *next_inline_clone;

      /* Look for a clone with the same decl (an inline clone).  */
      for (next_inline_clone = node->clones;
	   next_inline_clone && next_inline_clone->decl != node->decl;
	   next_inline_clone = next_inline_clone->next_sibling_clone)
	;

      /* If there is inline clone of the node being removed, we need
	 to put it into the position of removed node and reorganize all
	 other clones to be based on it.  */
      if (next_inline_clone)
	{
	  struct cgraph_node *n;
	  struct cgraph_node *new_clones;

	  *slot = next_inline_clone;

	  /* Unlink inline clone from the list of clones of removed node.  */
	  if (next_inline_clone->next_sibling_clone)
	    next_inline_clone->next_sibling_clone->prev_sibling_clone
	      = next_inline_clone->prev_sibling_clone;
	  if (next_inline_clone->prev_sibling_clone)
	    {
	      gcc_assert (node->clones != next_inline_clone);
	      next_inline_clone->prev_sibling_clone->next_sibling_clone
		= next_inline_clone->next_sibling_clone;
	    }
	  else
	    {
	      gcc_assert (node->clones == next_inline_clone);
	      node->clones = next_inline_clone->next_sibling_clone;
	    }

	  new_clones = node->clones;
	  node->clones = NULL;

	  /* Copy clone info.  */
	  next_inline_clone->clone = node->clone;

	  /* Now place it into clone tree at same level at NODE.  */
	  next_inline_clone->clone_of = node->clone_of;
	  next_inline_clone->prev_sibling_clone = NULL;
	  next_inline_clone->next_sibling_clone = NULL;
	  if (node->clone_of)
	    {
	      if (node->clone_of->clones)
		node->clone_of->clones->prev_sibling_clone = next_inline_clone;
	      next_inline_clone->next_sibling_clone = node->clone_of->clones;
	      node->clone_of->clones = next_inline_clone;
	    }

	  /* Merge the clone list.  */
	  if (new_clones)
	    {
	      if (!next_inline_clone->clones)
		next_inline_clone->clones = new_clones;
	      else
		{
		  n = next_inline_clone->clones;
		  while (n->next_sibling_clone)
		    n = n->next_sibling_clone;
		  n->next_sibling_clone = new_clones;
		  new_clones->prev_sibling_clone = n;
		}
	    }

	  /* Update clone_of pointers.  */
	  n = new_clones;
	  while (n)
	    {
	      n->clone_of = next_inline_clone;
	      n = n->next_sibling_clone;
	    }
	}
      else
	{
	  htab_clear_slot (cgraph_hash, slot);
	  kill_body = true;
	}

    }
  /* Unlink NODE from the clone tree at its own level.  */
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else if (node->clone_of)
    node->clone_of->clones = node->next_sibling_clone;
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->clones)
    {
      struct cgraph_node *n, *next;

      if (node->clone_of)
	{
	  /* Splice NODE's clones into the parent's clone list.  The loop
	     retargets clone_of for all but the last clone; the last one is
	     handled after the loop so N still points at it for linking.  */
	  for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
	    n->clone_of = node->clone_of;
	  n->clone_of = node->clone_of;
	  n->next_sibling_clone = node->clone_of->clones;
	  if (node->clone_of->clones)
	    node->clone_of->clones->prev_sibling_clone = n;
	  node->clone_of->clones = node->clones;
	}
      else
	{
	  /* We are removing node with clones.  This makes clones
	     inconsistent, but assume they will be removed subsequently and
	     just keep clone tree intact.  This can happen in unreachable
	     function removal since we remove unreachable functions in
	     random order, not by bottom-up walk of clone trees.  */
	  for (n = node->clones; n; n = next)
	    {
	      next = n->next_sibling_clone;
	      n->next_sibling_clone = NULL;
	      n->prev_sibling_clone = NULL;
	      n->clone_of = NULL;
	    }
	}
    }

  while (node->same_body)
    cgraph_remove_same_body_alias (node->same_body);

  if (node->same_comdat_group)
    {
      struct cgraph_node *prev;
      /* Find the predecessor of NODE on the circular comdat group list
	 and unlink NODE from the cycle.  */
      for (prev = node->same_comdat_group;
	   prev->same_comdat_group != node;
	   prev = prev->same_comdat_group)
	;
      if (node->same_comdat_group == prev)
	prev->same_comdat_group = NULL;
      else
	prev->same_comdat_group = node->same_comdat_group;
      node->same_comdat_group = NULL;
    }

  /* While all the clones are removed after being proceeded, the function
     itself is kept in the cgraph even after it is compiled.  Check whether
     we are done with this body and reclaim it proactively if this is the case.
     */
  if (!kill_body && *slot)
    {
      struct cgraph_node *n = (struct cgraph_node *) *slot;
      if (!n->clones && !n->clone_of && !n->global.inlined_to
	  && (cgraph_global_info_ready
	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
		  || n->in_other_partition)))
	kill_body = true;
    }
  if (assembler_name_hash)
    {
      tree name = DECL_ASSEMBLER_NAME (node->decl);
      slot = htab_find_slot_with_hash (assembler_name_hash, name,
				       decl_assembler_name_hash (name),
				       NO_INSERT);
      /* Inline clones are not hashed.  */
      if (slot && *slot == node)
	htab_clear_slot (assembler_name_hash, slot);
    }

  if (kill_body)
    cgraph_release_function_body (node);
  node->decl = NULL;
  if (node->call_site_hash)
    {
      htab_delete (node->call_site_hash);
      node->call_site_hash = NULL;
    }
  cgraph_n_nodes--;

  /* Clear out the node to NULL all pointers and add the node to the free
     list.  */
  memset (node, 0, sizeof(*node));
  node->uid = uid;
  NEXT_FREE_NODE (node) = free_nodes;
  free_nodes = node;
}
1619
/* Remove NODE from the cgraph together with all functions that were
   inlined into it (callees reached through edges whose inline_failed
   is clear).  */

void
cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
{
  struct cgraph_edge *e, *next;
  for (e = node->callees; e; e = next)
    {
      /* Save the successor first; removing the callee removes E too.  */
      next = e->next_callee;
      if (!e->inline_failed)
	cgraph_remove_node_and_inline_clones (e->callee);
    }
  cgraph_remove_node (node);
}
1634
/* Notify finalize_compilation_unit that given node is reachable.  The node
   is pushed on cgraph_nodes_queue for processing; this may only happen
   before global info is computed (asserted).  */

void
cgraph_mark_reachable_node (struct cgraph_node *node)
{
  /* Only finalized, not-yet-reachable nodes need queueing.  */
  if (!node->reachable && node->local.finalized)
    {
      notice_global_symbol (node->decl);
      node->reachable = 1;
      gcc_assert (!cgraph_global_info_ready);

      node->next_needed = cgraph_nodes_queue;
      cgraph_nodes_queue = node;
    }
}
1650
/* Likewise indicate that a node is needed, i.e. reachable via some
   external means.  A needed node may never be an inline copy
   (asserted).  */

void
cgraph_mark_needed_node (struct cgraph_node *node)
{
  node->needed = 1;
  gcc_assert (!node->global.inlined_to);
  cgraph_mark_reachable_node (node);
}
1661
/* Likewise indicate that a node is having its address taken, which also
   makes it needed.  */

void
cgraph_mark_address_taken_node (struct cgraph_node *node)
{
  node->address_taken = 1;
  cgraph_mark_needed_node (node);
}
1670
1671 /* Return local info for the compiled function. */
1672
1673 struct cgraph_local_info *
1674 cgraph_local_info (tree decl)
1675 {
1676 struct cgraph_node *node;
1677
1678 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1679 node = cgraph_node (decl);
1680 return &node->local;
1681 }
1682
1683 /* Return local info for the compiled function. */
1684
1685 struct cgraph_global_info *
1686 cgraph_global_info (tree decl)
1687 {
1688 struct cgraph_node *node;
1689
1690 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1691 node = cgraph_node (decl);
1692 return &node->global;
1693 }
1694
1695 /* Return local info for the compiled function. */
1696
1697 struct cgraph_rtl_info *
1698 cgraph_rtl_info (tree decl)
1699 {
1700 struct cgraph_node *node;
1701
1702 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1703 node = cgraph_node (decl);
1704 if (decl != current_function_decl
1705 && !TREE_ASM_WRITTEN (node->decl))
1706 return NULL;
1707 return &node->rtl;
1708 }
1709
/* Return a string describing the failure REASON.  The string table is
   generated from cif-code.def; its order matches the values of the
   cgraph_inline_failed_t enum.  */

const char*
cgraph_inline_failed_string (cgraph_inline_failed_t reason)
{
#undef DEFCIFCODE
#define DEFCIFCODE(code, string)	string,

  static const char *cif_string_table[CIF_N_REASONS] = {
#include "cif-code.def"
  };

  /* Signedness of an enum type is implementation defined, so cast it
     to unsigned before testing.  */
  gcc_assert ((unsigned) reason < CIF_N_REASONS);
  return cif_string_table[reason];
}
1727
/* Return name of the node used in debug output.  Delegates to the
   language hook so the name follows front-end conventions.  */
const char *
cgraph_node_name (struct cgraph_node *node)
{
  return lang_hooks.decl_printable_name (node->decl, 2);
}
1734
/* Names used to print out the availability enum.  Indexed by the value
   returned from cgraph_function_body_availability, so the order must
   match that enum.  */
const char * const cgraph_availability_names[] =
  {"unset", "not_available", "overwritable", "available", "local"};
1738
1739
1740 /* Dump call graph node NODE to file F. */
1741
1742 void
1743 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1744 {
1745 struct cgraph_edge *edge;
1746 int indirect_calls_count = 0;
1747
1748 fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1749 node->pid);
1750 dump_addr (f, " @", (void *)node);
1751 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1752 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1753 if (node->global.inlined_to)
1754 fprintf (f, " (inline copy in %s/%i)",
1755 cgraph_node_name (node->global.inlined_to),
1756 node->global.inlined_to->uid);
1757 if (node->clone_of)
1758 fprintf (f, " (clone of %s/%i)",
1759 cgraph_node_name (node->clone_of),
1760 node->clone_of->uid);
1761 if (cgraph_function_flags_ready)
1762 fprintf (f, " availability:%s",
1763 cgraph_availability_names [cgraph_function_body_availability (node)]);
1764 if (node->analyzed)
1765 fprintf (f, " analyzed");
1766 if (node->in_other_partition)
1767 fprintf (f, " in_other_partition");
1768 if (node->count)
1769 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1770 (HOST_WIDEST_INT)node->count);
1771 if (node->local.inline_summary.self_time)
1772 fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1773 node->local.inline_summary.time_inlining_benefit);
1774 if (node->global.time && node->global.time
1775 != node->local.inline_summary.self_time)
1776 fprintf (f, " (%i after inlining)", node->global.time);
1777 if (node->local.inline_summary.self_size)
1778 fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1779 node->local.inline_summary.size_inlining_benefit);
1780 if (node->global.size && node->global.size
1781 != node->local.inline_summary.self_size)
1782 fprintf (f, " (%i after inlining)", node->global.size);
1783 if (node->local.inline_summary.estimated_self_stack_size)
1784 fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1785 if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1786 fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1787 if (node->origin)
1788 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1789 if (node->needed)
1790 fprintf (f, " needed");
1791 if (node->address_taken)
1792 fprintf (f, " address_taken");
1793 else if (node->reachable)
1794 fprintf (f, " reachable");
1795 else if (node->reachable_from_other_partition)
1796 fprintf (f, " reachable_from_other_partition");
1797 if (gimple_has_body_p (node->decl))
1798 fprintf (f, " body");
1799 if (node->process)
1800 fprintf (f, " process");
1801 if (node->local.local)
1802 fprintf (f, " local");
1803 if (node->local.externally_visible)
1804 fprintf (f, " externally_visible");
1805 if (node->local.finalized)
1806 fprintf (f, " finalized");
1807 if (node->local.disregard_inline_limits)
1808 fprintf (f, " always_inline");
1809 else if (node->local.inlinable)
1810 fprintf (f, " inlinable");
1811 if (node->local.redefined_extern_inline)
1812 fprintf (f, " redefined_extern_inline");
1813 if (TREE_ASM_WRITTEN (node->decl))
1814 fprintf (f, " asm_written");
1815
1816 fprintf (f, "\n called by: ");
1817 for (edge = node->callers; edge; edge = edge->next_caller)
1818 {
1819 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1820 edge->caller->uid);
1821 if (edge->count)
1822 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1823 (HOST_WIDEST_INT)edge->count);
1824 if (edge->frequency)
1825 fprintf (f, "(%.2f per call) ",
1826 edge->frequency / (double)CGRAPH_FREQ_BASE);
1827 if (!edge->inline_failed)
1828 fprintf(f, "(inlined) ");
1829 if (edge->indirect_inlining_edge)
1830 fprintf(f, "(indirect_inlining) ");
1831 if (edge->can_throw_external)
1832 fprintf(f, "(can throw external) ");
1833 }
1834
1835 fprintf (f, "\n calls: ");
1836 for (edge = node->callees; edge; edge = edge->next_callee)
1837 {
1838 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1839 edge->callee->uid);
1840 if (!edge->inline_failed)
1841 fprintf(f, "(inlined) ");
1842 if (edge->indirect_inlining_edge)
1843 fprintf(f, "(indirect_inlining) ");
1844 if (edge->count)
1845 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1846 (HOST_WIDEST_INT)edge->count);
1847 if (edge->frequency)
1848 fprintf (f, "(%.2f per call) ",
1849 edge->frequency / (double)CGRAPH_FREQ_BASE);
1850 if (edge->loop_nest)
1851 fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1852 if (edge->can_throw_external)
1853 fprintf(f, "(can throw external) ");
1854 }
1855 fprintf (f, "\n");
1856
1857 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1858 indirect_calls_count++;
1859 if (indirect_calls_count)
1860 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1861 indirect_calls_count);
1862
1863 if (node->same_body)
1864 {
1865 struct cgraph_node *n;
1866 fprintf (f, " aliases & thunks:");
1867 for (n = node->same_body; n; n = n->next)
1868 {
1869 fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1870 if (n->thunk.thunk_p)
1871 {
1872 fprintf (f, " (thunk of %s fixed ofset %i virtual value %i has "
1873 "virtual offset %i",
1874 lang_hooks.decl_printable_name (n->thunk.alias, 2),
1875 (int)n->thunk.fixed_offset,
1876 (int)n->thunk.virtual_value,
1877 (int)n->thunk.virtual_offset_p);
1878 fprintf (f, ")");
1879 }
1880 }
1881 fprintf (f, "\n");
1882 }
1883 }
1884
1885
/* Dump call graph node NODE to stderr.  Convenience wrapper intended for
   use from the debugger.  */

void
debug_cgraph_node (struct cgraph_node *node)
{
  dump_cgraph_node (stderr, node);
}
1893
1894
1895 /* Dump the callgraph to file F. */
1896
1897 void
1898 dump_cgraph (FILE *f)
1899 {
1900 struct cgraph_node *node;
1901
1902 fprintf (f, "callgraph:\n\n");
1903 for (node = cgraph_nodes; node; node = node->next)
1904 dump_cgraph_node (f, node);
1905 }
1906
1907
/* Dump the call graph to stderr.  Convenience wrapper intended for use
   from the debugger.  */

void
debug_cgraph (void)
{
  dump_cgraph (stderr);
}
1915
1916
/* Set the DECL_ASSEMBLER_NAME of DECL to NAME and update cgraph hashtables.
   May only be called before the assembler name hash is built (asserted),
   since that hash is keyed on the old name.  */

void
change_decl_assembler_name (tree decl, tree name)
{
  gcc_assert (!assembler_name_hash);
  if (!DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      SET_DECL_ASSEMBLER_NAME (decl, name);
      return;
    }
  /* Nothing to do when the name is unchanged.  */
  if (name == DECL_ASSEMBLER_NAME (decl))
    return;

  /* Warn about renaming a symbol that may already have been emitted to
     the assembler stream.  */
  if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
      && DECL_RTL_SET_P (decl))
    warning (0, "%D renamed after being referenced in assembly", decl);

  SET_DECL_ASSEMBLER_NAME (decl, name);
}
1937
1938 /* Add a top-level asm statement to the list. */
1939
1940 struct cgraph_asm_node *
1941 cgraph_add_asm_node (tree asm_str)
1942 {
1943 struct cgraph_asm_node *node;
1944
1945 node = GGC_CNEW (struct cgraph_asm_node);
1946 node->asm_str = asm_str;
1947 node->order = cgraph_order++;
1948 node->next = NULL;
1949 if (cgraph_asm_nodes == NULL)
1950 cgraph_asm_nodes = node;
1951 else
1952 cgraph_asm_last_node->next = node;
1953 cgraph_asm_last_node = node;
1954 return node;
1955 }
1956
1957 /* Return true when the DECL can possibly be inlined. */
1958 bool
1959 cgraph_function_possibly_inlined_p (tree decl)
1960 {
1961 if (!cgraph_global_info_ready)
1962 return !DECL_UNINLINABLE (decl);
1963 return DECL_POSSIBLY_INLINED (decl);
1964 }
1965
/* Create clone of E in the node N represented by CALL_STMT the callgraph.
   COUNT_SCALE (relative to REG_BR_PROB_BASE) and FREQ_SCALE (relative to
   CGRAPH_FREQ_BASE) scale the profile of the new edge; LOOP_NEST is added
   to the original nesting depth.  When UPDATE_ORIGINAL, the cloned count
   is subtracted from E.  */
struct cgraph_edge *
cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
		   int freq_scale, int loop_nest, bool update_original)
{
  struct cgraph_edge *new_edge;
  gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (e->indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
	{
	  /* The statement has meanwhile been folded to a known function;
	     create an ordinary direct edge instead.  */
	  struct cgraph_node *callee = cgraph_node (decl);
	  new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq,
					 e->loop_nest + loop_nest);
	}
      else
	{
	  new_edge = cgraph_create_indirect_edge (n, call_stmt, count, freq,
						  e->loop_nest + loop_nest);
	  new_edge->indirect_info->param_index = e->indirect_info->param_index;
	}
    }
  else
    new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
				   e->loop_nest + loop_nest);

  new_edge->inline_failed = e->inline_failed;
  new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  if (update_original)
    {
      e->count -= new_edge->count;
      if (e->count < 0)
	e->count = 0;
    }
  cgraph_call_edge_duplication_hooks (e, new_edge);
  return new_edge;
}
2016
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from original node too.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by node.  Caller edges listed in REDIRECT_CALLERS are redirected to the
   new node, and all outgoing edges (direct and indirect) are cloned.  */
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
		   int loop_nest, bool update_original,
		   VEC(cgraph_edge_p,heap) *redirect_callers)
{
  struct cgraph_node *new_node = cgraph_create_node ();
  struct cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  new_node->decl = n->decl;
  new_node->origin = n->origin;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = n->analyzed;
  new_node->local = n->local;
  new_node->local.externally_visible = false;
  new_node->local.local = true;
  new_node->local.vtable_method = false;
  new_node->global = n->global;
  new_node->rtl = n->rtl;
  new_node->count = count;
  new_node->frequency = n->frequency;
  new_node->clone = n->clone;
  new_node->clone.tree_map = 0;
  /* Compute the scale for the cloned edges' profiles, capped at
     REG_BR_PROB_BASE (i.e. a factor of 1).  */
  if (n->count)
    {
      if (new_node->count > n->count)
	count_scale = REG_BR_PROB_BASE;
      else
	count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
    }
  else
    count_scale = 0;
  if (update_original)
    {
      n->count -= count;
      if (n->count < 0)
	n->count = 0;
    }

  for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      cgraph_redirect_edge_callee (e, new_node);
    }


  /* Clone both the direct and the indirect outgoing edges.  */
  for (e = n->callees;e; e=e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);

  for (e = n->indirect_calls; e; e = e->next_callee)
    cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
		       count_scale, freq, loop_nest, update_original);

  /* Register NEW_NODE in the clone tree as a clone of N.  */
  new_node->next_sibling_clone = n->clones;
  if (n->clones)
    n->clones->prev_sibling_clone = new_node;
  n->clones = new_node;
  new_node->clone_of = n;

  cgraph_call_node_duplication_hooks (n, new_node);
  return new_node;
}
2092
2093 /* Create a new name for omp child function. Returns an identifier. */
2094
2095 static GTY(()) unsigned int clone_fn_id_num;
2096
2097 static tree
2098 clone_function_name (tree decl)
2099 {
2100 tree name = DECL_ASSEMBLER_NAME (decl);
2101 size_t len = IDENTIFIER_LENGTH (name);
2102 char *tmp_name, *prefix;
2103
2104 prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
2105 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2106 strcpy (prefix + len, "_clone");
2107 #ifndef NO_DOT_IN_LABEL
2108 prefix[len] = '.';
2109 #elif !defined NO_DOLLAR_IN_LABEL
2110 prefix[len] = '$';
2111 #endif
2112 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2113 return get_identifier (tmp_name);
2114 }
2115
/* Create callgraph node clone with new declaration.  The actual body will
   be copied later at compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of args_to_skip
   bitmap interface.
   */
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
			     VEC(cgraph_edge_p,heap) *redirect_callers,
			     VEC(ipa_replace_map_p,gc) *tree_map,
			     bitmap args_to_skip)
{
  tree old_decl = old_node->decl;
  struct cgraph_node *new_node = NULL;
  tree new_decl;
  struct cgraph_node key, **slot;

  gcc_assert (tree_versionable_function_p (old_decl));

  /* Make a new FUNCTION_DECL tree node */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
  /* The body is copied later; make sure the clone does not share the
     original's struct function.  */
  DECL_STRUCT_FUNCTION (new_decl) = NULL;

  /* Generate a new name for the new version. */
  DECL_NAME (new_decl) = clone_function_name (old_decl);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  /* Rtl must be regenerated for the new assembler name.  */
  SET_DECL_RTL (new_decl, NULL);

  new_node = cgraph_clone_node (old_node, old_node->count,
				CGRAPH_FREQ_BASE, 0, false,
				redirect_callers);
  new_node->decl = new_decl;
  /* Update the properties.
     Make clone visible only within this translation unit.  Make sure
     that is not weak also.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  DECL_EXTERNAL (new_node->decl) = 0;
  DECL_COMDAT_GROUP (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  new_node->clone.tree_map = tree_map;
  new_node->clone.args_to_skip = args_to_skip;
  /* Compute combined_args_to_skip: the set of argument positions of
     the *original* (root) declaration dropped along the whole chain
     of clones.  */
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
  else if (old_node->clone.combined_args_to_skip)
    {
      /* OLD_NODE already skipped some arguments: merge its combined
	 bitmap (indexed by root argument positions, OLDI) with
	 ARGS_TO_SKIP (indexed by OLD_NODE's surviving positions,
	 NEWI).  */
      int newi = 0, oldi = 0;
      tree arg;
      bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
      struct cgraph_node *orig_node;
      /* Walk up to the root (non-clone) node to obtain the full
	 original argument list.  */
      for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
	;
      for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
	{
	  if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
	    {
	      /* Already removed by an earlier clone; NEWI does not
		 advance since OLD_NODE never had this argument.  */
	      bitmap_set_bit (new_args_to_skip, oldi);
	      continue;
	    }
	  if (bitmap_bit_p (args_to_skip, newi))
	    bitmap_set_bit (new_args_to_skip, oldi);
	  newi++;
	}
      new_node->clone.combined_args_to_skip = new_args_to_skip;
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;
  new_node->local.externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
  new_node->reachable = true;

  /* Register the new declaration in the DECL_UID hash so cgraph_node
     lookups find this node.  */
  key.decl = new_decl;
  slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
  gcc_assert (!*slot);
  *slot = new_node;
  /* Keep the assembler-name hash in sync if it has been built.  */
  if (assembler_name_hash)
    {
      void **aslot;
      tree name = DECL_ASSEMBLER_NAME (new_decl);

      aslot = htab_find_slot_with_hash (assembler_name_hash, name,
					decl_assembler_name_hash (name),
					INSERT);
      gcc_assert (!*aslot);
      *aslot = new_node;
    }

  return new_node;
}
2211
2212 /* NODE is no longer nested function; update cgraph accordingly. */
2213 void
2214 cgraph_unnest_node (struct cgraph_node *node)
2215 {
2216 struct cgraph_node **node2 = &node->origin->nested;
2217 gcc_assert (node->origin);
2218
2219 while (*node2 != node)
2220 node2 = &(*node2)->next_nested;
2221 *node2 = node->next_nested;
2222 node->origin = NULL;
2223 }
2224
/* Return function availability.  See cgraph.h for description of individual
   return values.  */
enum availability
cgraph_function_body_availability (struct cgraph_node *node)
{
  enum availability avail;
  gcc_assert (cgraph_function_flags_ready);
  /* Without an analyzed body there is nothing to inspect.  */
  if (!node->analyzed)
    avail = AVAIL_NOT_AVAILABLE;
  /* Local functions can never be overridden from outside this unit.  */
  else if (node->local.local)
    avail = AVAIL_LOCAL;
  else if (!node->local.externally_visible)
    avail = AVAIL_AVAILABLE;
  /* Inline functions are safe to be analyzed even if their symbol can
     be overwritten at runtime.  It is not meaningful to enforce any sane
     behaviour on replacing inline function by different body.  */
  else if (DECL_DECLARED_INLINE_P (node->decl))
    avail = AVAIL_AVAILABLE;

  /* If the function can be overwritten, return OVERWRITABLE.  Take
     care at least of two notable extensions - the COMDAT functions
     used to share template instantiations in C++ (this is symmetric
     to code cp_cannot_inline_tree_fn and probably shall be shared and
     the inlinability hooks completely eliminated).

     ??? Does the C++ one definition rule allow us to always return
     AVAIL_AVAILABLE here?  That would be good reason to preserve this
     bit.  */

  else if (DECL_REPLACEABLE_P (node->decl) && !DECL_EXTERNAL (node->decl))
    avail = AVAIL_OVERWRITABLE;
  else avail = AVAIL_AVAILABLE;

  return avail;
}
2260
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  struct cgraph_node *node;
  /* How much processing is needed depends on how far compilation has
     progressed; no default case since all states are enumerated.  */
  switch (cgraph_state)
    {
      case CGRAPH_STATE_CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_node (fndecl);
	node->next_needed = cgraph_new_nodes;
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes = node;
        break;

      case CGRAPH_STATE_IPA:
      case CGRAPH_STATE_IPA_SSA:
      case CGRAPH_STATE_EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_node (fndecl);
	node->local.local = false;
	node->local.finalized = true;
	/* Mark reachable and needed: the address is assumed taken, so
	   no API-breaking optimization may remove it.  */
	node->reachable = node->needed = true;
	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	  {
	    /* During expansion nobody will lower the function later;
	       run the lowering and early local passes now.  */
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    current_function_decl = fndecl;
	    gimple_register_cfg_hooks ();
	    tree_lowering_passes (fndecl);
	    bitmap_obstack_initialize (NULL);
	    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      execute_pass_list (pass_early_local_passes.pass.sub);
	    bitmap_obstack_release (NULL);
	    pop_cfun ();
	    current_function_decl = NULL;

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	node->next_needed = cgraph_new_nodes;
	cgraph_new_nodes = node;
        break;

      case CGRAPH_STATE_FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	current_function_decl = fndecl;
	gimple_register_cfg_hooks ();
	if (!lowered)
          tree_lowering_passes (fndecl);
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  execute_pass_list (pass_early_local_passes.pass.sub);
	bitmap_obstack_release (NULL);
	/* Compile the function all the way to assembler output.  */
	tree_rest_of_compilation (fndecl);
	pop_cfun ();
	current_function_decl = NULL;
	break;
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
2342
2343 /* Return true if NODE can be made local for API change.
2344 Extern inline functions and C++ COMDAT functions can be made local
2345 at the expense of possible code size growth if function is used in multiple
2346 compilation units. */
2347 bool
2348 cgraph_node_can_be_local_p (struct cgraph_node *node)
2349 {
2350 return (!node->needed
2351 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2352 || !node->local.externally_visible));
2353 }
2354
2355 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2356 but other code such as notice_global_symbol generates rtl. */
2357 void
2358 cgraph_make_decl_local (tree decl)
2359 {
2360 rtx rtl, symbol;
2361
2362 if (TREE_CODE (decl) == VAR_DECL)
2363 DECL_COMMON (decl) = 0;
2364 else if (TREE_CODE (decl) == FUNCTION_DECL)
2365 {
2366 DECL_COMDAT (decl) = 0;
2367 DECL_COMDAT_GROUP (decl) = 0;
2368 DECL_WEAK (decl) = 0;
2369 DECL_EXTERNAL (decl) = 0;
2370 }
2371 else
2372 gcc_unreachable ();
2373 TREE_PUBLIC (decl) = 0;
2374 if (!DECL_RTL_SET_P (decl))
2375 return;
2376
2377 /* Update rtl flags. */
2378 make_decl_rtl (decl);
2379
2380 rtl = DECL_RTL (decl);
2381 if (!MEM_P (rtl))
2382 return;
2383
2384 symbol = XEXP (rtl, 0);
2385 if (GET_CODE (symbol) != SYMBOL_REF)
2386 return;
2387
2388 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2389 }
2390
2391 /* Bring NODE local. */
2392 void
2393 cgraph_make_node_local (struct cgraph_node *node)
2394 {
2395 gcc_assert (cgraph_node_can_be_local_p (node));
2396 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2397 {
2398 struct cgraph_node *alias;
2399 cgraph_make_decl_local (node->decl);
2400
2401 for (alias = node->same_body; alias; alias = alias->next)
2402 cgraph_make_decl_local (alias->decl);
2403
2404 node->local.externally_visible = false;
2405 node->local.local = true;
2406 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2407 }
2408 }
2409
2410 /* Set TREE_NOTHROW on NODE's decl and on same_body aliases of NODE
2411 if any to NOTHROW. */
2412
2413 void
2414 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2415 {
2416 struct cgraph_node *alias;
2417 TREE_NOTHROW (node->decl) = nothrow;
2418 for (alias = node->same_body; alias; alias = alias->next)
2419 TREE_NOTHROW (alias->decl) = nothrow;
2420 }
2421
2422 /* Set TREE_READONLY on NODE's decl and on same_body aliases of NODE
2423 if any to READONLY. */
2424
2425 void
2426 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2427 {
2428 struct cgraph_node *alias;
2429 TREE_READONLY (node->decl) = readonly;
2430 for (alias = node->same_body; alias; alias = alias->next)
2431 TREE_READONLY (alias->decl) = readonly;
2432 }
2433
2434 /* Set DECL_PURE_P on NODE's decl and on same_body aliases of NODE
2435 if any to PURE. */
2436
2437 void
2438 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2439 {
2440 struct cgraph_node *alias;
2441 DECL_PURE_P (node->decl) = pure;
2442 for (alias = node->same_body; alias; alias = alias->next)
2443 DECL_PURE_P (alias->decl) = pure;
2444 }
2445
2446 /* Set DECL_LOOPING_CONST_OR_PURE_P on NODE's decl and on
2447 same_body aliases of NODE if any to LOOPING_CONST_OR_PURE. */
2448
2449 void
2450 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2451 bool looping_const_or_pure)
2452 {
2453 struct cgraph_node *alias;
2454 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2455 for (alias = node->same_body; alias; alias = alias->next)
2456 DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
2457 }
2458
2459 /* See if the frequency of NODE can be updated based on frequencies of its
2460 callers. */
2461 bool
2462 cgraph_propagate_frequency (struct cgraph_node *node)
2463 {
2464 bool maybe_unlikely_executed = true, maybe_executed_once = true;
2465 struct cgraph_edge *edge;
2466 if (!node->local.local)
2467 return false;
2468 gcc_assert (node->analyzed);
2469 if (node->frequency == NODE_FREQUENCY_HOT)
2470 return false;
2471 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2472 return false;
2473 if (dump_file && (dump_flags & TDF_DETAILS))
2474 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2475 for (edge = node->callers;
2476 edge && (maybe_unlikely_executed || maybe_executed_once);
2477 edge = edge->next_caller)
2478 {
2479 if (!edge->frequency)
2480 continue;
2481 switch (edge->caller->frequency)
2482 {
2483 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2484 break;
2485 case NODE_FREQUENCY_EXECUTED_ONCE:
2486 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, " Called by %s that is executed once\n", cgraph_node_name (node));
2488 maybe_unlikely_executed = false;
2489 if (edge->loop_nest)
2490 {
2491 maybe_executed_once = false;
2492 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, " Called in loop\n");
2494 }
2495 break;
2496 case NODE_FREQUENCY_HOT:
2497 case NODE_FREQUENCY_NORMAL:
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2499 fprintf (dump_file, " Called by %s that is normal or hot\n", cgraph_node_name (node));
2500 maybe_unlikely_executed = false;
2501 maybe_executed_once = false;
2502 break;
2503 }
2504 }
2505 if (maybe_unlikely_executed)
2506 {
2507 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2508 if (dump_file)
2509 fprintf (dump_file, "Node %s promoted to unlikely executed.\n", cgraph_node_name (node));
2510 return true;
2511 }
2512 if (maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2513 {
2514 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2515 if (dump_file)
2516 fprintf (dump_file, "Node %s promoted to executed once.\n", cgraph_node_name (node));
2517 return true;
2518 }
2519 return false;
2520 }
2521
2522 #include "gt-cgraph.h"