[gcc.git] / gcc / cgraph.c
1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains the basic routines for manipulating the call graph.
23
24 The callgraph:
25
26 The call-graph is a data structure designed for inter-procedural
27 optimization. It is also used in non-unit-at-a-time compilation to allow
28 easier code sharing.
29
30 The call-graph consists of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to a unique node.
32
33 The mapping from declarations to call-graph nodes is done using a hash
34 table keyed on DECL_UID. The call-graph nodes are created lazily by the
35 cgraph_node function when it is called for an unknown declaration.
36
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. The NEEDED flag is set for each node that may
39 be accessed in such an invisible way, and such a node shall be considered an
40 entry point to the callgraph.
41
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees, which can be accessed through the
44 indirect_calls field of a node. Note, however, that at the
45 moment only calls that are potential candidates for indirect inlining are
46 added there.
47
48 Interprocedural information:
49
50 The callgraph is the place to store data needed for interprocedural
51 optimization. All data structures are divided into three components:
52 local_info, which is produced while analyzing the function; global_info,
53 which is the result of walking the whole callgraph at the end of
54 compilation; and rtl_info, used by the RTL backend to propagate data from
55 already compiled functions to their callers.
56
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and are therefore reasonably dense.
59
60 Inlining plans:
61
62 The function inlining decisions are made in advance and maintained
63 in the callgraph as the so-called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by the inliner.
66 Each inlined call gets a unique corresponding clone node of the callee,
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
69 caller.
70
71 Each edge has an "inline_failed" field. When the field is NULL,
72 the call will be inlined. When it is non-NULL, it contains the reason
73 why inlining wasn't performed. */
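
/* A minimal sketch of how client code typically walks the callgraph described
   above.  It mirrors the loops used by dump_cgraph and dump_cgraph_node later
   in this file; the function name walk_all_calls is purely illustrative, and
   the block is kept under "#if 0" so it is never compiled.  */
#if 0
static void
walk_all_calls (void)
{
  struct cgraph_node *node;
  struct cgraph_edge *edge;

  /* All nodes are linked through their "next" pointer, starting at
     cgraph_nodes.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Direct calls made by NODE.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
        gcc_assert (edge->caller == node);

      /* Indirect calls with yet-unknown callees.  */
      for (edge = node->indirect_calls; edge; edge = edge->next_callee)
        gcc_assert (edge->indirect_unknown_callee);
    }
}
#endif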
74
75 #include "config.h"
76 #include "system.h"
77 #include "coretypes.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
82 #include "hashtab.h"
83 #include "toplev.h"
84 #include "flags.h"
85 #include "ggc.h"
86 #include "debug.h"
87 #include "target.h"
88 #include "basic-block.h"
89 #include "cgraph.h"
90 #include "output.h"
91 #include "intl.h"
92 #include "gimple.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
96 #include "except.h"
97 #include "diagnostic-core.h"
98 #include "rtl.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101 #include "ipa-inline.h"
102
103 const char * const ld_plugin_symbol_resolution_names[]=
104 {
105 "",
106 "undef",
107 "prevailing_def",
108 "prevailing_def_ironly",
109 "preempted_reg",
110 "preempted_ir",
111 "resolved_ir",
112 "resolved_exec",
113 "resolved_dyn",
114 "prevailing_def_ironly_exp"
115 };
116
117 static void cgraph_node_remove_callers (struct cgraph_node *node);
118 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
119 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
120
121 /* Hash table used to convert declarations into nodes. */
122 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
123 /* Hash table used to convert assembler names into nodes. */
124 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
125
126 /* The linked list of cgraph nodes. */
127 struct cgraph_node *cgraph_nodes;
128
129 /* Queue of cgraph nodes scheduled to be lowered. */
130 struct cgraph_node *cgraph_nodes_queue;
131
132 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
133 secondary queue used during optimization to accommodate passes that
134 may generate new functions that need to be optimized and expanded. */
135 struct cgraph_node *cgraph_new_nodes;
136
137 /* Number of nodes in existence. */
138 int cgraph_n_nodes;
139
140 /* Maximal uid used in cgraph nodes. */
141 int cgraph_max_uid;
142
143 /* Maximal uid used in cgraph edges. */
144 int cgraph_edge_max_uid;
145
146 /* Set when the whole unit has been analyzed so we can access global info. */
147 bool cgraph_global_info_ready = false;
148
149 /* What state callgraph is in right now. */
150 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
151
152 /* Set when the cgraph is fully built and the basic flags are computed. */
153 bool cgraph_function_flags_ready = false;
154
155 /* Linked list of cgraph asm nodes. */
156 struct cgraph_asm_node *cgraph_asm_nodes;
157
158 /* Last node in cgraph_asm_nodes. */
159 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
160
161 /* The order index of the next cgraph node to be created. This is
162 used so that we can sort the cgraph nodes in order by when we saw
163 them, to support -fno-toplevel-reorder. */
164 int cgraph_order;
165
166 /* List of hooks triggered on cgraph_edge events. */
167 struct cgraph_edge_hook_list {
168 cgraph_edge_hook hook;
169 void *data;
170 struct cgraph_edge_hook_list *next;
171 };
172
173 /* List of hooks triggered on cgraph_node events. */
174 struct cgraph_node_hook_list {
175 cgraph_node_hook hook;
176 void *data;
177 struct cgraph_node_hook_list *next;
178 };
179
180 /* List of hooks triggered on events involving two cgraph_edges. */
181 struct cgraph_2edge_hook_list {
182 cgraph_2edge_hook hook;
183 void *data;
184 struct cgraph_2edge_hook_list *next;
185 };
186
187 /* List of hooks triggered on events involving two cgraph_nodes. */
188 struct cgraph_2node_hook_list {
189 cgraph_2node_hook hook;
190 void *data;
191 struct cgraph_2node_hook_list *next;
192 };
193
194 /* List of hooks triggered when an edge is removed. */
195 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
196 /* List of hooks triggered when a node is removed. */
197 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
198 /* List of hooks triggered when an edge is duplicated. */
199 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
200 /* List of hooks triggered when a node is duplicated. */
201 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
202 /* List of hooks triggered when a function is inserted. */
203 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
204
205 /* Head of a linked list of unused (freed) call graph nodes.
206 Do not GTY((delete)) this list so UIDs get reliably recycled. */
207 static GTY(()) struct cgraph_node *free_nodes;
208 /* Head of a linked list of unused (freed) call graph edges.
209 Do not GTY((delete)) this list so UIDs get reliably recycled. */
210 static GTY(()) struct cgraph_edge *free_edges;
211
212 /* Did cgraph_process_same_body_aliases run? */
213 bool same_body_aliases_done;
214
215 /* Macros to access the next item in the list of free cgraph nodes and
216 edges. */
217 #define NEXT_FREE_NODE(NODE) (NODE)->next
218 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
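
/* The free lists above implement a simple recycling scheme: a freed node or
   edge keeps its uid, so uids stay reasonably dense and on-the-side arrays
   indexed by uid do not grow without bound.  The self-contained plain-C
   sketch below shows the same pattern in isolation (not GCC code; the names
   "item", "item_alloc" and "item_free" are made up for illustration, and the
   block is kept under "#if 0" so it is never compiled).  */
#if 0
#include <stdlib.h>
#include <string.h>

struct item
{
  int uid;
  struct item *next;            /* Doubles as the free-list link.  */
};

static struct item *free_items; /* Head of the list of recycled items.  */
static int item_max_uid;        /* Uid to assign to the next fresh item.  */

static struct item *
item_alloc (void)
{
  struct item *it;

  if (free_items)
    {
      /* Reuse a previously freed item; its uid is preserved.  */
      it = free_items;
      free_items = it->next;
    }
  else
    {
      /* No recycled item available; allocate a fresh one with a new uid.  */
      it = (struct item *) calloc (1, sizeof (*it));
      it->uid = item_max_uid++;
    }
  return it;
}

static void
item_free (struct item *it)
{
  int uid = it->uid;

  /* Clear the contents but keep the uid, much like cgraph_free_edge and
     cgraph_remove_node do, then push the item onto the free list.  */
  memset (it, 0, sizeof (*it));
  it->uid = uid;
  it->next = free_items;
  free_items = it;
}
#endif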
219
220 /* Register HOOK to be called with DATA on each removed edge. */
221 struct cgraph_edge_hook_list *
222 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
223 {
224 struct cgraph_edge_hook_list *entry;
225 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
226
227 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
228 entry->hook = hook;
229 entry->data = data;
230 entry->next = NULL;
231 while (*ptr)
232 ptr = &(*ptr)->next;
233 *ptr = entry;
234 return entry;
235 }
236
237 /* Remove ENTRY from the list of hooks called on removing edges. */
238 void
239 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
240 {
241 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
242
243 while (*ptr != entry)
244 ptr = &(*ptr)->next;
245 *ptr = entry->next;
246 free (entry);
247 }
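
/* A brief usage sketch for the hook API above and below: a pass registers a
   callback, lets the callgraph change, and unregisters it again.  The callback
   body and the names my_edge_removal_note and my_pass_example are made up for
   illustration; the block is kept under "#if 0" so it is never compiled.  */
#if 0
static void
my_edge_removal_note (struct cgraph_edge *e ATTRIBUTE_UNUSED,
                      void *data ATTRIBUTE_UNUSED)
{
  /* Called by cgraph_call_edge_removal_hooks for every edge about to be
     removed, e.g. to drop per-edge data kept on the side.  */
}

static void
my_pass_example (void)
{
  struct cgraph_edge_hook_list *cookie
    = cgraph_add_edge_removal_hook (my_edge_removal_note, NULL);

  /* ... work that may remove callgraph edges ... */

  cgraph_remove_edge_removal_hook (cookie);
}
#endif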
248
249 /* Call all edge removal hooks. */
250 static void
251 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
252 {
253 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
254 while (entry)
255 {
256 entry->hook (e, entry->data);
257 entry = entry->next;
258 }
259 }
260
261 /* Register HOOK to be called with DATA on each removed node. */
262 struct cgraph_node_hook_list *
263 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
264 {
265 struct cgraph_node_hook_list *entry;
266 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
267
268 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
269 entry->hook = hook;
270 entry->data = data;
271 entry->next = NULL;
272 while (*ptr)
273 ptr = &(*ptr)->next;
274 *ptr = entry;
275 return entry;
276 }
277
278 /* Remove ENTRY from the list of hooks called on removing nodes. */
279 void
280 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
281 {
282 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
283
284 while (*ptr != entry)
285 ptr = &(*ptr)->next;
286 *ptr = entry->next;
287 free (entry);
288 }
289
290 /* Call all node removal hooks. */
291 static void
292 cgraph_call_node_removal_hooks (struct cgraph_node *node)
293 {
294 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
295 while (entry)
296 {
297 entry->hook (node, entry->data);
298 entry = entry->next;
299 }
300 }
301
302 /* Register HOOK to be called with DATA on each inserted node. */
303 struct cgraph_node_hook_list *
304 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
305 {
306 struct cgraph_node_hook_list *entry;
307 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
308
309 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
310 entry->hook = hook;
311 entry->data = data;
312 entry->next = NULL;
313 while (*ptr)
314 ptr = &(*ptr)->next;
315 *ptr = entry;
316 return entry;
317 }
318
319 /* Remove ENTRY from the list of hooks called on inserted nodes. */
320 void
321 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
322 {
323 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
324
325 while (*ptr != entry)
326 ptr = &(*ptr)->next;
327 *ptr = entry->next;
328 free (entry);
329 }
330
331 /* Call all node insertion hooks. */
332 void
333 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
334 {
335 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
336 while (entry)
337 {
338 entry->hook (node, entry->data);
339 entry = entry->next;
340 }
341 }
342
343 /* Register HOOK to be called with DATA on each duplicated edge. */
344 struct cgraph_2edge_hook_list *
345 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
346 {
347 struct cgraph_2edge_hook_list *entry;
348 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
349
350 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
351 entry->hook = hook;
352 entry->data = data;
353 entry->next = NULL;
354 while (*ptr)
355 ptr = &(*ptr)->next;
356 *ptr = entry;
357 return entry;
358 }
359
360 /* Remove ENTRY from the list of hooks called on duplicating edges. */
361 void
362 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
363 {
364 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
365
366 while (*ptr != entry)
367 ptr = &(*ptr)->next;
368 *ptr = entry->next;
369 free (entry);
370 }
371
372 /* Call all edge duplication hooks. */
373 static void
374 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
375 struct cgraph_edge *cs2)
376 {
377 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
378 while (entry)
379 {
380 entry->hook (cs1, cs2, entry->data);
381 entry = entry->next;
382 }
383 }
384
385 /* Register HOOK to be called with DATA on each duplicated node. */
386 struct cgraph_2node_hook_list *
387 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
388 {
389 struct cgraph_2node_hook_list *entry;
390 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
391
392 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
393 entry->hook = hook;
394 entry->data = data;
395 entry->next = NULL;
396 while (*ptr)
397 ptr = &(*ptr)->next;
398 *ptr = entry;
399 return entry;
400 }
401
402 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
403 void
404 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
405 {
406 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
407
408 while (*ptr != entry)
409 ptr = &(*ptr)->next;
410 *ptr = entry->next;
411 free (entry);
412 }
413
414 /* Call all node duplication hooks. */
415 static void
416 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
417 struct cgraph_node *node2)
418 {
419 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
420 while (entry)
421 {
422 entry->hook (node1, node2, entry->data);
423 entry = entry->next;
424 }
425 }
426
427 /* Returns a hash code for P. */
428
429 static hashval_t
430 hash_node (const void *p)
431 {
432 const struct cgraph_node *n = (const struct cgraph_node *) p;
433 return (hashval_t) DECL_UID (n->decl);
434 }
435
436
437 /* Returns nonzero if P1 and P2 are equal. */
438
439 static int
440 eq_node (const void *p1, const void *p2)
441 {
442 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
443 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
444 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
445 }
446
447 /* Allocate new callgraph node. */
448
449 static inline struct cgraph_node *
450 cgraph_allocate_node (void)
451 {
452 struct cgraph_node *node;
453
454 if (free_nodes)
455 {
456 node = free_nodes;
457 free_nodes = NEXT_FREE_NODE (node);
458 }
459 else
460 {
461 node = ggc_alloc_cleared_cgraph_node ();
462 node->uid = cgraph_max_uid++;
463 }
464
465 return node;
466 }
467
468 /* Allocate new callgraph node and insert it into basic data structures. */
469
470 static struct cgraph_node *
471 cgraph_create_node_1 (void)
472 {
473 struct cgraph_node *node = cgraph_allocate_node ();
474
475 node->next = cgraph_nodes;
476 node->order = cgraph_order++;
477 if (cgraph_nodes)
478 cgraph_nodes->previous = node;
479 node->previous = NULL;
480 node->frequency = NODE_FREQUENCY_NORMAL;
481 node->count_materialization_scale = REG_BR_PROB_BASE;
482 ipa_empty_ref_list (&node->ref_list);
483 cgraph_nodes = node;
484 cgraph_n_nodes++;
485 return node;
486 }
487
488 /* Return cgraph node assigned to DECL. Create new one when needed. */
489
490 struct cgraph_node *
491 cgraph_create_node (tree decl)
492 {
493 struct cgraph_node key, *node, **slot;
494
495 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
496
497 if (!cgraph_hash)
498 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
499
500 key.decl = decl;
501 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
502 gcc_assert (!*slot);
503
504 node = cgraph_create_node_1 ();
505 node->decl = decl;
506 *slot = node;
507 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
508 {
509 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
510 node->next_nested = node->origin->nested;
511 node->origin->nested = node;
512 }
513 if (assembler_name_hash)
514 {
515 void **aslot;
516 tree name = DECL_ASSEMBLER_NAME (decl);
517
518 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
519 decl_assembler_name_hash (name),
520 INSERT);
521 /* We can have multiple declarations with the same assembler name. For C++
522 it is __builtin_strlen and strlen, for instance. Do we need to
523 record them all? The original implementation marked just the first one,
524 so let's hope for the best. */
525 if (*aslot == NULL)
526 *aslot = node;
527 }
528 return node;
529 }
530
531 /* Try to find a call graph node for declaration DECL and if it does not exist,
532 create it. */
533
534 struct cgraph_node *
535 cgraph_get_create_node (tree decl)
536 {
537 struct cgraph_node *node;
538
539 node = cgraph_get_node (decl);
540 if (node)
541 return node;
542
543 return cgraph_create_node (decl);
544 }
545
546 /* Mark ALIAS as an alias to DECL, the declaration the aliased function body
547 is associated with (not necessarily cgraph_node (DECL)). */
548
549 struct cgraph_node *
550 cgraph_create_function_alias (tree alias, tree decl)
551 {
552 struct cgraph_node *alias_node;
553
554 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
555 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
556 alias_node = cgraph_get_create_node (alias);
557 gcc_assert (!alias_node->local.finalized);
558 alias_node->thunk.alias = decl;
559 alias_node->local.finalized = true;
560 alias_node->alias = 1;
561
562 if ((TREE_PUBLIC (alias) && !DECL_COMDAT (alias) && !DECL_EXTERNAL (alias))
563 || (DECL_VIRTUAL_P (alias)
564 && (DECL_COMDAT (alias) || DECL_EXTERNAL (alias))))
565 cgraph_mark_reachable_node (alias_node);
566 return alias_node;
567 }
568
569 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
570 and NULL otherwise.
571 Same body aliases are output whenever the body of DECL is output,
572 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
573
574 struct cgraph_node *
575 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
576 {
577 struct cgraph_node *n;
578 #ifndef ASM_OUTPUT_DEF
579 /* If aliases aren't supported by the assembler, fail. */
580 return NULL;
581 #endif
582 /* Langhooks can create same body aliases of symbols not defined.
583 Those are useless. Drop them on the floor. */
584 if (cgraph_global_info_ready)
585 return NULL;
586
587 n = cgraph_create_function_alias (alias, decl);
588 n->same_body_alias = true;
589 if (same_body_aliases_done)
590 ipa_record_reference (n, NULL, cgraph_get_node (decl), NULL, IPA_REF_ALIAS,
591 NULL);
592 return n;
593 }
594
595 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
596 aliases DECL, with adjustments made to the first parameter.
597 See the comments in thunk_adjust for details on the parameters. */
598
599 struct cgraph_node *
600 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
601 tree alias, tree decl,
602 bool this_adjusting,
603 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
604 tree virtual_offset,
605 tree real_alias)
606 {
607 struct cgraph_node *node;
608
609 node = cgraph_get_node (alias);
610 if (node)
611 {
612 gcc_assert (node->local.finalized);
613 gcc_assert (!node->alias);
614 gcc_assert (!node->thunk.thunk_p);
615 cgraph_remove_node (node);
616 }
617
618 node = cgraph_create_node (alias);
619 gcc_checking_assert (!virtual_offset
620 || double_int_equal_p
621 (tree_to_double_int (virtual_offset),
622 shwi_to_double_int (virtual_value)));
623 node->thunk.fixed_offset = fixed_offset;
624 node->thunk.this_adjusting = this_adjusting;
625 node->thunk.virtual_value = virtual_value;
626 node->thunk.virtual_offset_p = virtual_offset != NULL;
627 node->thunk.alias = real_alias;
628 node->thunk.thunk_p = true;
629 node->local.finalized = true;
630
631 if (cgraph_decide_is_function_needed (node, decl))
632 cgraph_mark_needed_node (node);
633
634 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
635 || (DECL_VIRTUAL_P (decl)
636 && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
637 cgraph_mark_reachable_node (node);
638
639 return node;
640 }
641
642 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
643 is assigned. */
644
645 struct cgraph_node *
646 cgraph_get_node (const_tree decl)
647 {
648 struct cgraph_node key, *node = NULL, **slot;
649
650 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
651
652 if (!cgraph_hash)
653 return NULL;
654
655 key.decl = CONST_CAST2 (tree, const_tree, decl);
656
657 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
658 NO_INSERT);
659
660 if (slot && *slot)
661 node = *slot;
662 return node;
663 }
664
665 /* Insert already constructed node into hashtable. */
666
667 void
668 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
669 {
670 struct cgraph_node **slot;
671
672 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
673
674 gcc_assert (!*slot);
675 *slot = node;
676 }
677
678 /* Returns a hash code for P. */
679
680 static hashval_t
681 hash_node_by_assembler_name (const void *p)
682 {
683 const struct cgraph_node *n = (const struct cgraph_node *) p;
684 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
685 }
686
687 /* Returns nonzero if P1 and P2 are equal. */
688
689 static int
690 eq_assembler_name (const void *p1, const void *p2)
691 {
692 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
693 const_tree name = (const_tree)p2;
694 return (decl_assembler_name_equal (n1->decl, name));
695 }
696
697 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
698 Return NULL if there's no such node. */
699
700 struct cgraph_node *
701 cgraph_node_for_asm (tree asmname)
702 {
703 struct cgraph_node *node;
704 void **slot;
705
706 if (!assembler_name_hash)
707 {
708 assembler_name_hash =
709 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
710 NULL);
711 for (node = cgraph_nodes; node; node = node->next)
712 if (!node->global.inlined_to)
713 {
714 tree name = DECL_ASSEMBLER_NAME (node->decl);
715 slot = htab_find_slot_with_hash (assembler_name_hash, name,
716 decl_assembler_name_hash (name),
717 INSERT);
718 /* We can have multiple declarations with the same assembler name. For C++
719 it is __builtin_strlen and strlen, for instance. Do we need to
720 record them all? The original implementation marked just the first one,
721 so let's hope for the best. */
722 if (!*slot)
723 *slot = node;
724 }
725 }
726
727 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
728 decl_assembler_name_hash (asmname),
729 NO_INSERT);
730
731 if (slot)
732 {
733 node = (struct cgraph_node *) *slot;
734 return node;
735 }
736 return NULL;
737 }
738
739 /* Returns a hash value for X (which really is a cgraph_edge). */
740
741 static hashval_t
742 edge_hash (const void *x)
743 {
744 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
745 }
746
747 /* Return nonzero if the call statement of cgraph_edge X is the statement Y. */
748
749 static int
750 edge_eq (const void *x, const void *y)
751 {
752 return ((const struct cgraph_edge *) x)->call_stmt == y;
753 }
754
755 /* Add call graph edge E to call site hash of its caller. */
756
757 static inline void
758 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
759 {
760 void **slot;
761 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
762 e->call_stmt,
763 htab_hash_pointer (e->call_stmt),
764 INSERT);
765 gcc_assert (!*slot);
766 *slot = e;
767 }
768
769 /* Return the callgraph edge representing the GIMPLE_CALL statement
770 CALL_STMT. */
771
772 struct cgraph_edge *
773 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
774 {
775 struct cgraph_edge *e, *e2;
776 int n = 0;
777
778 if (node->call_site_hash)
779 return (struct cgraph_edge *)
780 htab_find_with_hash (node->call_site_hash, call_stmt,
781 htab_hash_pointer (call_stmt));
782
783 /* This loop may turn out to be a performance problem. In that case, adding
784 hashtables to call nodes with very many edges is probably the best
785 solution. It is not a good idea to add a pointer into the CALL_EXPR itself,
786 because we want to make it possible to have multiple cgraph nodes representing
787 different clones of the same body before the body is actually cloned. */
788 for (e = node->callees; e; e = e->next_callee)
789 {
790 if (e->call_stmt == call_stmt)
791 break;
792 n++;
793 }
794
795 if (!e)
796 for (e = node->indirect_calls; e; e = e->next_callee)
797 {
798 if (e->call_stmt == call_stmt)
799 break;
800 n++;
801 }
802
803 if (n > 100)
804 {
805 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
806 for (e2 = node->callees; e2; e2 = e2->next_callee)
807 cgraph_add_edge_to_call_site_hash (e2);
808 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
809 cgraph_add_edge_to_call_site_hash (e2);
810 }
811
812 return e;
813 }
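
/* The function above prefers a linear scan of the edge lists and only builds
   the call-site hash once a node turns out to have very many edges.  The
   self-contained plain-C sketch below shows that scan-then-hash pattern in
   isolation (not GCC code; the names rec, lookup, SCAN_THRESHOLD and
   TABLE_SIZE are made up, and the block is kept under "#if 0" so it is never
   compiled).  */
#if 0
#include <stddef.h>
#include <stdint.h>

#define SCAN_THRESHOLD 100      /* Same threshold cgraph_edge uses.  */
#define TABLE_SIZE 256          /* Power of two; demo only, no resizing.  */

struct rec
{
  const void *key;
  struct rec *next;             /* Link in the global list.  */
  struct rec *hash_next;        /* Link in a hash bucket once the table exists.  */
};

static struct rec *all_recs;            /* All records, linearly linked.  */
static struct rec *table[TABLE_SIZE];   /* Lazily filled hash table.  */
static int table_built;

/* Cheap pointer hash, analogous to htab_hash_pointer.  */
static size_t
hash_key (const void *key)
{
  return ((uintptr_t) key >> 3) & (TABLE_SIZE - 1);
}

/* Build the hash table from the linear list.  */
static void
build_table (void)
{
  struct rec *r;

  for (r = all_recs; r; r = r->next)
    {
      size_t i = hash_key (r->key);
      r->hash_next = table[i];
      table[i] = r;
    }
  table_built = 1;
}

/* Look KEY up.  Use the hash table when it exists; otherwise scan the list
   and, if the scan gets long, build the table so later lookups are fast.  */
static struct rec *
lookup (const void *key)
{
  struct rec *r;
  int scanned = 0;

  if (table_built)
    {
      for (r = table[hash_key (key)]; r; r = r->hash_next)
        if (r->key == key)
          return r;
      return NULL;
    }

  for (r = all_recs; r; r = r->next)
    {
      if (r->key == key)
        break;
      scanned++;
    }
  if (scanned > SCAN_THRESHOLD)
    build_table ();
  return r;
}
#endif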
814
815
816 /* Change field call_stmt of edge E to NEW_STMT. */
817
818 void
819 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
820 {
821 tree decl;
822
823 if (e->caller->call_site_hash)
824 {
825 htab_remove_elt_with_hash (e->caller->call_site_hash,
826 e->call_stmt,
827 htab_hash_pointer (e->call_stmt));
828 }
829
830 e->call_stmt = new_stmt;
831 if (e->indirect_unknown_callee
832 && (decl = gimple_call_fndecl (new_stmt)))
833 {
834 /* Constant propagation (and possibly also inlining?) can turn an
835 indirect call into a direct one. */
836 struct cgraph_node *new_callee = cgraph_get_node (decl);
837
838 gcc_checking_assert (new_callee);
839 cgraph_make_edge_direct (e, new_callee);
840 }
841
842 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
843 e->can_throw_external = stmt_can_throw_external (new_stmt);
844 pop_cfun ();
845 if (e->caller->call_site_hash)
846 cgraph_add_edge_to_call_site_hash (e);
847 }
848
849 /* Like cgraph_set_call_stmt but walk the clone tree and update all
850 clones sharing the same function body. */
851
852 void
853 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
854 gimple old_stmt, gimple new_stmt)
855 {
856 struct cgraph_node *node;
857 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
858
859 if (edge)
860 cgraph_set_call_stmt (edge, new_stmt);
861
862 node = orig->clones;
863 if (node)
864 while (node != orig)
865 {
866 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
867 if (edge)
868 cgraph_set_call_stmt (edge, new_stmt);
869 if (node->clones)
870 node = node->clones;
871 else if (node->next_sibling_clone)
872 node = node->next_sibling_clone;
873 else
874 {
875 while (node != orig && !node->next_sibling_clone)
876 node = node->clone_of;
877 if (node != orig)
878 node = node->next_sibling_clone;
879 }
880 }
881 }
882
883 /* Like cgraph_create_edge, but walk the clone tree and update all clones
884 sharing the same function body. If clones already have an edge for OLD_STMT,
885 only update the edge the same way cgraph_set_call_stmt_including_clones does.
886
887 TODO: COUNT and FREQ should be properly distributed based on the relative
888 frequencies of the clones. */
889
890 void
891 cgraph_create_edge_including_clones (struct cgraph_node *orig,
892 struct cgraph_node *callee,
893 gimple old_stmt,
894 gimple stmt, gcov_type count,
895 int freq,
896 cgraph_inline_failed_t reason)
897 {
898 struct cgraph_node *node;
899 struct cgraph_edge *edge;
900
901 if (!cgraph_edge (orig, stmt))
902 {
903 edge = cgraph_create_edge (orig, callee, stmt, count, freq);
904 edge->inline_failed = reason;
905 }
906
907 node = orig->clones;
908 if (node)
909 while (node != orig)
910 {
911 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
912
913 /* It is possible that clones already contain the edge while the
914 master didn't. Either we promoted an indirect call into a direct
915 call in the clone, or we are processing clones of an unreachable
916 master whose edges have been removed. */
917 if (edge)
918 cgraph_set_call_stmt (edge, stmt);
919 else if (!cgraph_edge (node, stmt))
920 {
921 edge = cgraph_create_edge (node, callee, stmt, count,
922 freq);
923 edge->inline_failed = reason;
924 }
925
926 if (node->clones)
927 node = node->clones;
928 else if (node->next_sibling_clone)
929 node = node->next_sibling_clone;
930 else
931 {
932 while (node != orig && !node->next_sibling_clone)
933 node = node->clone_of;
934 if (node != orig)
935 node = node->next_sibling_clone;
936 }
937 }
938 }
939
940 /* Allocate a cgraph_edge structure and fill it with data according to the
941 parameters of which only CALLEE can be NULL (when creating an indirect call
942 edge). */
943
944 static struct cgraph_edge *
945 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
946 gimple call_stmt, gcov_type count, int freq)
947 {
948 struct cgraph_edge *edge;
949
950 /* LTO does not actually have access to the call_stmt since these
951 have not been loaded yet. */
952 if (call_stmt)
953 {
954 /* This is a rather expensive check possibly triggering
955 construction of call stmt hashtable. */
956 gcc_checking_assert (!cgraph_edge (caller, call_stmt));
957
958 gcc_assert (is_gimple_call (call_stmt));
959 }
960
961 if (free_edges)
962 {
963 edge = free_edges;
964 free_edges = NEXT_FREE_EDGE (edge);
965 }
966 else
967 {
968 edge = ggc_alloc_cgraph_edge ();
969 edge->uid = cgraph_edge_max_uid++;
970 }
971
972 edge->aux = NULL;
973 edge->caller = caller;
974 edge->callee = callee;
975 edge->prev_caller = NULL;
976 edge->next_caller = NULL;
977 edge->prev_callee = NULL;
978 edge->next_callee = NULL;
979
980 edge->count = count;
981 gcc_assert (count >= 0);
982 edge->frequency = freq;
983 gcc_assert (freq >= 0);
984 gcc_assert (freq <= CGRAPH_FREQ_MAX);
985
986 edge->call_stmt = call_stmt;
987 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
988 edge->can_throw_external
989 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
990 pop_cfun ();
991 edge->call_stmt_cannot_inline_p =
992 (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
993 if (call_stmt && caller->call_site_hash)
994 cgraph_add_edge_to_call_site_hash (edge);
995
996 edge->indirect_info = NULL;
997 edge->indirect_inlining_edge = 0;
998
999 return edge;
1000 }
1001
1002 /* Create edge from CALLER to CALLEE in the cgraph. */
1003
1004 struct cgraph_edge *
1005 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1006 gimple call_stmt, gcov_type count, int freq)
1007 {
1008 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1009 count, freq);
1010
1011 edge->indirect_unknown_callee = 0;
1012 initialize_inline_failed (edge);
1013
1014 edge->next_caller = callee->callers;
1015 if (callee->callers)
1016 callee->callers->prev_caller = edge;
1017 edge->next_callee = caller->callees;
1018 if (caller->callees)
1019 caller->callees->prev_callee = edge;
1020 caller->callees = edge;
1021 callee->callers = edge;
1022
1023 return edge;
1024 }
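
/* A short usage sketch for the function above: when a pass discovers a new
   direct call statement in NODE (within the current function), it typically
   creates the edge from the statement's basic-block profile, mirroring what
   cgraph_update_edges_for_call_stmt_node does further down.  The wrapper name
   note_new_direct_call is made up; the block is kept under "#if 0" so it is
   never compiled.  */
#if 0
static struct cgraph_edge *
note_new_direct_call (struct cgraph_node *node, gimple call_stmt)
{
  tree fndecl = gimple_call_fndecl (call_stmt);
  basic_block bb = gimple_bb (call_stmt);

  /* Count and frequency come from the basic block holding the call.  */
  return cgraph_create_edge (node, cgraph_get_create_node (fndecl),
                             call_stmt, bb->count,
                             compute_call_stmt_bb_frequency
                               (current_function_decl, bb));
}
#endif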
1025
1026 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1027
1028 struct cgraph_indirect_call_info *
1029 cgraph_allocate_init_indirect_info (void)
1030 {
1031 struct cgraph_indirect_call_info *ii;
1032
1033 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1034 ii->param_index = -1;
1035 return ii;
1036 }
1037
1038 /* Create an indirect edge with a yet-undetermined callee where the call
1039 statement destination is a formal parameter of the caller with index
1040 PARAM_INDEX. */
1041
1042 struct cgraph_edge *
1043 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1044 int ecf_flags,
1045 gcov_type count, int freq)
1046 {
1047 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1048 count, freq);
1049
1050 edge->indirect_unknown_callee = 1;
1051 initialize_inline_failed (edge);
1052
1053 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1054 edge->indirect_info->ecf_flags = ecf_flags;
1055
1056 edge->next_callee = caller->indirect_calls;
1057 if (caller->indirect_calls)
1058 caller->indirect_calls->prev_callee = edge;
1059 caller->indirect_calls = edge;
1060
1061 return edge;
1062 }
1063
1064 /* Remove the edge E from the list of the callers of the callee. */
1065
1066 static inline void
1067 cgraph_edge_remove_callee (struct cgraph_edge *e)
1068 {
1069 gcc_assert (!e->indirect_unknown_callee);
1070 if (e->prev_caller)
1071 e->prev_caller->next_caller = e->next_caller;
1072 if (e->next_caller)
1073 e->next_caller->prev_caller = e->prev_caller;
1074 if (!e->prev_caller)
1075 e->callee->callers = e->next_caller;
1076 }
1077
1078 /* Remove the edge E from the list of the callees of the caller. */
1079
1080 static inline void
1081 cgraph_edge_remove_caller (struct cgraph_edge *e)
1082 {
1083 if (e->prev_callee)
1084 e->prev_callee->next_callee = e->next_callee;
1085 if (e->next_callee)
1086 e->next_callee->prev_callee = e->prev_callee;
1087 if (!e->prev_callee)
1088 {
1089 if (e->indirect_unknown_callee)
1090 e->caller->indirect_calls = e->next_callee;
1091 else
1092 e->caller->callees = e->next_callee;
1093 }
1094 if (e->caller->call_site_hash)
1095 htab_remove_elt_with_hash (e->caller->call_site_hash,
1096 e->call_stmt,
1097 htab_hash_pointer (e->call_stmt));
1098 }
1099
1100 /* Put the edge onto the free list. */
1101
1102 static void
1103 cgraph_free_edge (struct cgraph_edge *e)
1104 {
1105 int uid = e->uid;
1106
1107 /* Clear out the edge so we do not dangle pointers. */
1108 memset (e, 0, sizeof (*e));
1109 e->uid = uid;
1110 NEXT_FREE_EDGE (e) = free_edges;
1111 free_edges = e;
1112 }
1113
1114 /* Remove the edge E in the cgraph. */
1115
1116 void
1117 cgraph_remove_edge (struct cgraph_edge *e)
1118 {
1119 /* Call all edge removal hooks. */
1120 cgraph_call_edge_removal_hooks (e);
1121
1122 if (!e->indirect_unknown_callee)
1123 /* Remove from callers list of the callee. */
1124 cgraph_edge_remove_callee (e);
1125
1126 /* Remove from callees list of the callers. */
1127 cgraph_edge_remove_caller (e);
1128
1129 /* Put the edge onto the free list. */
1130 cgraph_free_edge (e);
1131 }
1132
1133 /* Set callee of call graph edge E and add it to the corresponding set of
1134 callers. */
1135
1136 static void
1137 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1138 {
1139 e->prev_caller = NULL;
1140 if (n->callers)
1141 n->callers->prev_caller = e;
1142 e->next_caller = n->callers;
1143 n->callers = e;
1144 e->callee = n;
1145 }
1146
1147 /* Redirect callee of E to N. The function does not update underlying
1148 call expression. */
1149
1150 void
1151 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1152 {
1153 /* Remove from callers list of the current callee. */
1154 cgraph_edge_remove_callee (e);
1155
1156 /* Insert to callers list of the new callee. */
1157 cgraph_set_edge_callee (e, n);
1158 }
1159
1160 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1161 CALLEE. The edge is moved from the caller's list of indirect calls to its
1162 ordinary callee list, and its inlining status is recomputed. */
1163
1164 void
1165 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1166 {
1167 edge->indirect_unknown_callee = 0;
1168
1169 /* Get the edge out of the indirect edge list. */
1170 if (edge->prev_callee)
1171 edge->prev_callee->next_callee = edge->next_callee;
1172 if (edge->next_callee)
1173 edge->next_callee->prev_callee = edge->prev_callee;
1174 if (!edge->prev_callee)
1175 edge->caller->indirect_calls = edge->next_callee;
1176
1177 /* Put it into the normal callee list */
1178 edge->prev_callee = NULL;
1179 edge->next_callee = edge->caller->callees;
1180 if (edge->caller->callees)
1181 edge->caller->callees->prev_callee = edge;
1182 edge->caller->callees = edge;
1183
1184 /* Insert to callers list of the new callee. */
1185 cgraph_set_edge_callee (edge, callee);
1186
1187 /* We need to re-determine the inlining status of the edge. */
1188 initialize_inline_failed (edge);
1189 }
1190
1191
1192 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1193 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1194 of OLD_STMT if it was previously a call statement.
1195 If NEW_STMT is NULL, the call has been dropped without any
1196 replacement. */
1197
1198 static void
1199 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1200 gimple old_stmt, tree old_call,
1201 gimple new_stmt)
1202 {
1203 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1204 ? gimple_call_fndecl (new_stmt) : 0;
1205
1206 /* If neither the old nor the new statement is a direct call, there is nothing to update. */
1207 if (!new_call && !old_call)
1208 return;
1209 /* See if we turned an indirect call into a direct call, or folded a call to
1210 one builtin into a call to a different builtin. */
1211 if (old_call != new_call)
1212 {
1213 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1214 struct cgraph_edge *ne = NULL;
1215 gcov_type count;
1216 int frequency;
1217
1218 if (e)
1219 {
1220 /* See if the edge is already there and has the correct callee. It
1221 might be so because indirect inlining has already updated
1222 it. We also might have cloned and redirected the edge. */
1223 if (new_call && e->callee)
1224 {
1225 struct cgraph_node *callee = e->callee;
1226 while (callee)
1227 {
1228 if (callee->decl == new_call
1229 || callee->former_clone_of == new_call)
1230 return;
1231 callee = callee->clone_of;
1232 }
1233 }
1234
1235 /* Otherwise remove the edge and create a new one; we can't simply redirect it,
1236 since the function has changed, so the inline plan and other information
1237 attached to the edge are invalid. */
1238 count = e->count;
1239 frequency = e->frequency;
1240 cgraph_remove_edge (e);
1241 }
1242 else if (new_call)
1243 {
1244 /* We are seeing a new direct call; compute profile info based on the BB. */
1245 basic_block bb = gimple_bb (new_stmt);
1246 count = bb->count;
1247 frequency = compute_call_stmt_bb_frequency (current_function_decl,
1248 bb);
1249 }
1250
1251 if (new_call)
1252 {
1253 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1254 new_stmt, count, frequency);
1255 gcc_assert (ne->inline_failed);
1256 }
1257 }
1258 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1259 else if (old_stmt != new_stmt)
1260 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1261 }
1262
1263 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1264 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1265 of OLD_STMT before it was updated (updating can happen in place). */
1266
1267 void
1268 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1269 {
1270 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1271 struct cgraph_node *node;
1272
1273 gcc_checking_assert (orig);
1274 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1275 if (orig->clones)
1276 for (node = orig->clones; node != orig;)
1277 {
1278 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1279 if (node->clones)
1280 node = node->clones;
1281 else if (node->next_sibling_clone)
1282 node = node->next_sibling_clone;
1283 else
1284 {
1285 while (node != orig && !node->next_sibling_clone)
1286 node = node->clone_of;
1287 if (node != orig)
1288 node = node->next_sibling_clone;
1289 }
1290 }
1291 }
1292
1293
1294 /* Remove all callees from the node. */
1295
1296 void
1297 cgraph_node_remove_callees (struct cgraph_node *node)
1298 {
1299 struct cgraph_edge *e, *f;
1300
1301 /* It is sufficient to remove the edges from the lists of callers of
1302 the callees. The callee list of the node can be zapped with one
1303 assignment. */
1304 for (e = node->callees; e; e = f)
1305 {
1306 f = e->next_callee;
1307 cgraph_call_edge_removal_hooks (e);
1308 if (!e->indirect_unknown_callee)
1309 cgraph_edge_remove_callee (e);
1310 cgraph_free_edge (e);
1311 }
1312 for (e = node->indirect_calls; e; e = f)
1313 {
1314 f = e->next_callee;
1315 cgraph_call_edge_removal_hooks (e);
1316 if (!e->indirect_unknown_callee)
1317 cgraph_edge_remove_callee (e);
1318 cgraph_free_edge (e);
1319 }
1320 node->indirect_calls = NULL;
1321 node->callees = NULL;
1322 if (node->call_site_hash)
1323 {
1324 htab_delete (node->call_site_hash);
1325 node->call_site_hash = NULL;
1326 }
1327 }
1328
1329 /* Remove all callers from the node. */
1330
1331 static void
1332 cgraph_node_remove_callers (struct cgraph_node *node)
1333 {
1334 struct cgraph_edge *e, *f;
1335
1336 /* It is sufficient to remove the edges from the lists of callees of
1337 the callers. The caller list of the node can be zapped with one
1338 assignment. */
1339 for (e = node->callers; e; e = f)
1340 {
1341 f = e->next_caller;
1342 cgraph_call_edge_removal_hooks (e);
1343 cgraph_edge_remove_caller (e);
1344 cgraph_free_edge (e);
1345 }
1346 node->callers = NULL;
1347 }
1348
1349 /* Release memory used to represent body of function NODE. */
1350
1351 void
1352 cgraph_release_function_body (struct cgraph_node *node)
1353 {
1354 if (DECL_STRUCT_FUNCTION (node->decl))
1355 {
1356 tree old_decl = current_function_decl;
1357 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1358 if (cfun->gimple_df)
1359 {
1360 current_function_decl = node->decl;
1361 delete_tree_ssa ();
1362 delete_tree_cfg_annotations ();
1363 cfun->eh = NULL;
1364 current_function_decl = old_decl;
1365 }
1366 if (cfun->cfg)
1367 {
1368 gcc_assert (dom_computed[0] == DOM_NONE);
1369 gcc_assert (dom_computed[1] == DOM_NONE);
1370 clear_edges ();
1371 }
1372 if (cfun->value_histograms)
1373 free_histograms ();
1374 gcc_assert (!current_loops);
1375 pop_cfun();
1376 gimple_set_body (node->decl, NULL);
1377 VEC_free (ipa_opt_pass, heap,
1378 node->ipa_transforms_to_apply);
1379 /* The struct function hangs onto a lot of data that would leak if we
1380 didn't remove all pointers to it. */
1381 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1382 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1383 }
1384 DECL_SAVED_TREE (node->decl) = NULL;
1385 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1386 of its associated function declaration because it is
1387 needed to emit debug info later. */
1388 if (!node->abstract_and_needed)
1389 DECL_INITIAL (node->decl) = error_mark_node;
1390 }
1391
1392 /* Remove the node from cgraph. */
1393
1394 void
1395 cgraph_remove_node (struct cgraph_node *node)
1396 {
1397 void **slot;
1398 bool kill_body = false;
1399 struct cgraph_node *n;
1400 int uid = node->uid;
1401
1402 cgraph_call_node_removal_hooks (node);
1403 cgraph_node_remove_callers (node);
1404 cgraph_node_remove_callees (node);
1405 ipa_remove_all_references (&node->ref_list);
1406 ipa_remove_all_refering (&node->ref_list);
1407 VEC_free (ipa_opt_pass, heap,
1408 node->ipa_transforms_to_apply);
1409
1410 /* Incremental inlining accesses removed nodes stored in the postorder
1411 list. */
1412 node->needed = node->reachable = false;
1413 for (n = node->nested; n; n = n->next_nested)
1414 n->origin = NULL;
1415 node->nested = NULL;
1416 if (node->origin)
1417 {
1418 struct cgraph_node **node2 = &node->origin->nested;
1419
1420 while (*node2 != node)
1421 node2 = &(*node2)->next_nested;
1422 *node2 = node->next_nested;
1423 }
1424 if (node->previous)
1425 node->previous->next = node->next;
1426 else
1427 cgraph_nodes = node->next;
1428 if (node->next)
1429 node->next->previous = node->previous;
1430 node->next = NULL;
1431 node->previous = NULL;
1432 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1433 if (*slot == node)
1434 {
1435 struct cgraph_node *next_inline_clone;
1436
1437 for (next_inline_clone = node->clones;
1438 next_inline_clone && next_inline_clone->decl != node->decl;
1439 next_inline_clone = next_inline_clone->next_sibling_clone)
1440 ;
1441
1442 /* If there is an inline clone of the node being removed, we need
1443 to put it into the position of the removed node and reorganize all
1444 other clones to be based on it. */
1445 if (next_inline_clone)
1446 {
1447 struct cgraph_node *n;
1448 struct cgraph_node *new_clones;
1449
1450 *slot = next_inline_clone;
1451
1452 /* Unlink inline clone from the list of clones of removed node. */
1453 if (next_inline_clone->next_sibling_clone)
1454 next_inline_clone->next_sibling_clone->prev_sibling_clone
1455 = next_inline_clone->prev_sibling_clone;
1456 if (next_inline_clone->prev_sibling_clone)
1457 {
1458 gcc_assert (node->clones != next_inline_clone);
1459 next_inline_clone->prev_sibling_clone->next_sibling_clone
1460 = next_inline_clone->next_sibling_clone;
1461 }
1462 else
1463 {
1464 gcc_assert (node->clones == next_inline_clone);
1465 node->clones = next_inline_clone->next_sibling_clone;
1466 }
1467
1468 new_clones = node->clones;
1469 node->clones = NULL;
1470
1471 /* Copy clone info. */
1472 next_inline_clone->clone = node->clone;
1473
1474 /* Now place it into the clone tree at the same level as NODE. */
1475 next_inline_clone->clone_of = node->clone_of;
1476 next_inline_clone->prev_sibling_clone = NULL;
1477 next_inline_clone->next_sibling_clone = NULL;
1478 if (node->clone_of)
1479 {
1480 if (node->clone_of->clones)
1481 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1482 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1483 node->clone_of->clones = next_inline_clone;
1484 }
1485
1486 /* Merge the clone list. */
1487 if (new_clones)
1488 {
1489 if (!next_inline_clone->clones)
1490 next_inline_clone->clones = new_clones;
1491 else
1492 {
1493 n = next_inline_clone->clones;
1494 while (n->next_sibling_clone)
1495 n = n->next_sibling_clone;
1496 n->next_sibling_clone = new_clones;
1497 new_clones->prev_sibling_clone = n;
1498 }
1499 }
1500
1501 /* Update clone_of pointers. */
1502 n = new_clones;
1503 while (n)
1504 {
1505 n->clone_of = next_inline_clone;
1506 n = n->next_sibling_clone;
1507 }
1508 }
1509 else
1510 {
1511 htab_clear_slot (cgraph_hash, slot);
1512 kill_body = true;
1513 }
1514
1515 }
1516 if (node->prev_sibling_clone)
1517 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1518 else if (node->clone_of)
1519 node->clone_of->clones = node->next_sibling_clone;
1520 if (node->next_sibling_clone)
1521 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1522 if (node->clones)
1523 {
1524 struct cgraph_node *n, *next;
1525
1526 if (node->clone_of)
1527 {
1528 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1529 n->clone_of = node->clone_of;
1530 n->clone_of = node->clone_of;
1531 n->next_sibling_clone = node->clone_of->clones;
1532 if (node->clone_of->clones)
1533 node->clone_of->clones->prev_sibling_clone = n;
1534 node->clone_of->clones = node->clones;
1535 }
1536 else
1537 {
1538 /* We are removing a node with clones. This makes the clones inconsistent,
1539 but assume they will be removed subsequently and just keep the clone
1540 tree intact. This can happen during unreachable function removal, since
1541 we remove unreachable functions in a random order rather than by a
1542 bottom-up walk of the clone trees. */
1543 for (n = node->clones; n; n = next)
1544 {
1545 next = n->next_sibling_clone;
1546 n->next_sibling_clone = NULL;
1547 n->prev_sibling_clone = NULL;
1548 n->clone_of = NULL;
1549 }
1550 }
1551 }
1552
1553 if (node->same_comdat_group)
1554 {
1555 struct cgraph_node *prev;
1556 for (prev = node->same_comdat_group;
1557 prev->same_comdat_group != node;
1558 prev = prev->same_comdat_group)
1559 ;
1560 if (node->same_comdat_group == prev)
1561 prev->same_comdat_group = NULL;
1562 else
1563 prev->same_comdat_group = node->same_comdat_group;
1564 node->same_comdat_group = NULL;
1565 }
1566
1567 /* While all the clones are removed after being processed, the function
1568 itself is kept in the cgraph even after it is compiled. Check whether
1569 we are done with this body and reclaim it proactively if this is the
1570 case. */
1571 if (!kill_body && *slot)
1572 {
1573 struct cgraph_node *n = (struct cgraph_node *) *slot;
1574 if (!n->clones && !n->clone_of && !n->global.inlined_to
1575 && (cgraph_global_info_ready
1576 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1577 || n->in_other_partition)))
1578 kill_body = true;
1579 }
1580 if (assembler_name_hash)
1581 {
1582 tree name = DECL_ASSEMBLER_NAME (node->decl);
1583 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1584 decl_assembler_name_hash (name),
1585 NO_INSERT);
1586 /* Inline clones are not hashed. */
1587 if (slot && *slot == node)
1588 htab_clear_slot (assembler_name_hash, slot);
1589 }
1590
1591 if (kill_body)
1592 cgraph_release_function_body (node);
1593 node->decl = NULL;
1594 if (node->call_site_hash)
1595 {
1596 htab_delete (node->call_site_hash);
1597 node->call_site_hash = NULL;
1598 }
1599 cgraph_n_nodes--;
1600
1601 /* Clear out the node to NULL all pointers and add the node to the free
1602 list. */
1603 memset (node, 0, sizeof(*node));
1604 node->uid = uid;
1605 NEXT_FREE_NODE (node) = free_nodes;
1606 free_nodes = node;
1607 }
1608
1609 /* Add NEW_ to the same comdat group that OLD is in. */
1610
1611 void
1612 cgraph_add_to_same_comdat_group (struct cgraph_node *new_,
1613 struct cgraph_node *old)
1614 {
1615 gcc_assert (DECL_ONE_ONLY (old->decl));
1616 gcc_assert (!new_->same_comdat_group);
1617 gcc_assert (new_ != old);
1618
1619 DECL_COMDAT_GROUP (new_->decl) = DECL_COMDAT_GROUP (old->decl);
1620 new_->same_comdat_group = old;
1621 if (!old->same_comdat_group)
1622 old->same_comdat_group = new_;
1623 else
1624 {
1625 struct cgraph_node *n;
1626 for (n = old->same_comdat_group;
1627 n->same_comdat_group != old;
1628 n = n->same_comdat_group)
1629 ;
1630 n->same_comdat_group = new_;
1631 }
1632 }
1633
1634 /* Remove the node from the cgraph, together with all clones inlined into it. */
1635
1636 void
1637 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1638 {
1639 struct cgraph_edge *e, *next;
1640 for (e = node->callees; e; e = next)
1641 {
1642 next = e->next_callee;
1643 if (!e->inline_failed)
1644 cgraph_remove_node_and_inline_clones (e->callee);
1645 }
1646 cgraph_remove_node (node);
1647 }
1648
1649 /* Notify finalize_compilation_unit that given node is reachable. */
1650
1651 void
1652 cgraph_mark_reachable_node (struct cgraph_node *node)
1653 {
1654 if (!node->reachable && node->local.finalized)
1655 {
1656 if (cgraph_global_info_ready)
1657 {
1658 /* Verify that the function does not appear to become needed out of the
1659 blue during the optimization process. This can happen for extern
1660 inlines whose bodies were removed after inlining. */
1661 gcc_assert ((node->analyzed || node->in_other_partition
1662 || DECL_EXTERNAL (node->decl)));
1663 }
1664 else
1665 notice_global_symbol (node->decl);
1666 node->reachable = 1;
1667
1668 node->next_needed = cgraph_nodes_queue;
1669 cgraph_nodes_queue = node;
1670 }
1671 }
1672
1673 /* Likewise indicate that a node is needed, i.e. reachable via some
1674 external means. */
1675
1676 void
1677 cgraph_mark_needed_node (struct cgraph_node *node)
1678 {
1679 node->needed = 1;
1680 gcc_assert (!node->global.inlined_to);
1681 cgraph_mark_reachable_node (node);
1682 }
1683
1684 /* Likewise indicate that a node has its address taken. */
1685
1686 void
1687 cgraph_mark_address_taken_node (struct cgraph_node *node)
1688 {
1689 gcc_assert (!node->global.inlined_to);
1690 cgraph_mark_reachable_node (node);
1691 /* FIXME: The address_taken flag is used both as a shortcut for testing whether
1692 an IPA_REF_ADDR reference exists (and thus it should be set on the node
1693 representing the alias we take the address of) and as a test of whether the
1694 address of the object was taken (and thus it should be set on the node the
1695 alias is referring to). We should remove the first use and then remove the
1696 following set. */
1697 node->address_taken = 1;
1698 node = cgraph_function_or_thunk_node (node, NULL);
1699 node->address_taken = 1;
1700 }
1701
1702 /* Return local info for the compiled function. */
1703
1704 struct cgraph_local_info *
1705 cgraph_local_info (tree decl)
1706 {
1707 struct cgraph_node *node;
1708
1709 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1710 node = cgraph_get_node (decl);
1711 if (!node)
1712 return NULL;
1713 return &node->local;
1714 }
1715
1716 /* Return global info for the compiled function. */
1717
1718 struct cgraph_global_info *
1719 cgraph_global_info (tree decl)
1720 {
1721 struct cgraph_node *node;
1722
1723 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1724 node = cgraph_get_node (decl);
1725 if (!node)
1726 return NULL;
1727 return &node->global;
1728 }
1729
1730 /* Return RTL info for the compiled function. */
1731
1732 struct cgraph_rtl_info *
1733 cgraph_rtl_info (tree decl)
1734 {
1735 struct cgraph_node *node;
1736
1737 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1738 node = cgraph_get_node (decl);
1739 if (!node
1740 || (decl != current_function_decl
1741 && !TREE_ASM_WRITTEN (node->decl)))
1742 return NULL;
1743 return &node->rtl;
1744 }
1745
1746 /* Return a string describing the failure REASON. */
1747
1748 const char*
1749 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1750 {
1751 #undef DEFCIFCODE
1752 #define DEFCIFCODE(code, string) string,
1753
1754 static const char *cif_string_table[CIF_N_REASONS] = {
1755 #include "cif-code.def"
1756 };
1757
1758 /* Signedness of an enum type is implementation defined, so cast it
1759 to unsigned before testing. */
1760 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1761 return cif_string_table[reason];
1762 }
1763
1764 /* Return name of the node used in debug output. */
1765 const char *
1766 cgraph_node_name (struct cgraph_node *node)
1767 {
1768 return lang_hooks.decl_printable_name (node->decl, 2);
1769 }
1770
1771 /* Names used to print out the availability enum. */
1772 const char * const cgraph_availability_names[] =
1773 {"unset", "not_available", "overwritable", "available", "local"};
1774
1775
1776 /* Dump call graph node NODE to file F. */
1777
1778 void
1779 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1780 {
1781 struct cgraph_edge *edge;
1782 int indirect_calls_count = 0;
1783
1784 fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
1785 dump_addr (f, " @", (void *)node);
1786 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1787 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1788 if (node->global.inlined_to)
1789 fprintf (f, " (inline copy in %s/%i)",
1790 cgraph_node_name (node->global.inlined_to),
1791 node->global.inlined_to->uid);
1792 if (node->same_comdat_group)
1793 fprintf (f, " (same comdat group as %s/%i)",
1794 cgraph_node_name (node->same_comdat_group),
1795 node->same_comdat_group->uid);
1796 if (node->clone_of)
1797 fprintf (f, " (clone of %s/%i)",
1798 cgraph_node_name (node->clone_of),
1799 node->clone_of->uid);
1800 if (cgraph_function_flags_ready)
1801 fprintf (f, " availability:%s",
1802 cgraph_availability_names [cgraph_function_body_availability (node)]);
1803 if (node->analyzed)
1804 fprintf (f, " analyzed");
1805 if (node->in_other_partition)
1806 fprintf (f, " in_other_partition");
1807 if (node->count)
1808 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1809 (HOST_WIDEST_INT)node->count);
1810 if (node->origin)
1811 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1812 if (node->needed)
1813 fprintf (f, " needed");
1814 if (node->address_taken)
1815 fprintf (f, " address_taken");
1816 else if (node->reachable)
1817 fprintf (f, " reachable");
1818 else if (node->reachable_from_other_partition)
1819 fprintf (f, " reachable_from_other_partition");
1820 if (gimple_has_body_p (node->decl))
1821 fprintf (f, " body");
1822 if (node->process)
1823 fprintf (f, " process");
1824 if (node->local.local)
1825 fprintf (f, " local");
1826 if (node->local.externally_visible)
1827 fprintf (f, " externally_visible");
1828 if (node->resolution != LDPR_UNKNOWN)
1829 fprintf (f, " %s",
1830 ld_plugin_symbol_resolution_names[(int)node->resolution]);
1831 if (node->local.finalized)
1832 fprintf (f, " finalized");
1833 if (node->local.redefined_extern_inline)
1834 fprintf (f, " redefined_extern_inline");
1835 if (TREE_ASM_WRITTEN (node->decl))
1836 fprintf (f, " asm_written");
1837 if (node->only_called_at_startup)
1838 fprintf (f, " only_called_at_startup");
1839 if (node->only_called_at_exit)
1840 fprintf (f, " only_called_at_exit");
1841
1842 fprintf (f, "\n");
1843
1844 if (node->thunk.thunk_p)
1845 {
1846 fprintf (f, " thunk of %s (asm: %s) fixed offset %i virtual value %i has "
1847 "virtual offset %i\n",
1848 lang_hooks.decl_printable_name (node->thunk.alias, 2),
1849 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
1850 (int)node->thunk.fixed_offset,
1851 (int)node->thunk.virtual_value,
1852 (int)node->thunk.virtual_offset_p);
1853 }
1854 if (node->alias && node->thunk.alias)
1855 {
1856 fprintf (f, " alias of %s",
1857 lang_hooks.decl_printable_name (node->thunk.alias, 2));
1858 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
1859 fprintf (f, " (asm: %s)",
1860 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
1861 fprintf (f, "\n");
1862 }
1863
1864 fprintf (f, " called by: ");
1865
1866 for (edge = node->callers; edge; edge = edge->next_caller)
1867 {
1868 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1869 edge->caller->uid);
1870 if (edge->count)
1871 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1872 (HOST_WIDEST_INT)edge->count);
1873 if (edge->frequency)
1874 fprintf (f, "(%.2f per call) ",
1875 edge->frequency / (double)CGRAPH_FREQ_BASE);
1876 if (!edge->inline_failed)
1877 fprintf(f, "(inlined) ");
1878 if (edge->indirect_inlining_edge)
1879 fprintf(f, "(indirect_inlining) ");
1880 if (edge->can_throw_external)
1881 fprintf(f, "(can throw external) ");
1882 }
1883
1884 fprintf (f, "\n calls: ");
1885 for (edge = node->callees; edge; edge = edge->next_callee)
1886 {
1887 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1888 edge->callee->uid);
1889 if (!edge->inline_failed)
1890 fprintf(f, "(inlined) ");
1891 if (edge->indirect_inlining_edge)
1892 fprintf(f, "(indirect_inlining) ");
1893 if (edge->count)
1894 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1895 (HOST_WIDEST_INT)edge->count);
1896 if (edge->frequency)
1897 fprintf (f, "(%.2f per call) ",
1898 edge->frequency / (double)CGRAPH_FREQ_BASE);
1899 if (edge->can_throw_external)
1900 fprintf(f, "(can throw external) ");
1901 }
1902 fprintf (f, "\n");
1903 fprintf (f, " References: ");
1904 ipa_dump_references (f, &node->ref_list);
1905 fprintf (f, " Referring this function: ");
1906 ipa_dump_refering (f, &node->ref_list);
1907
1908 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1909 indirect_calls_count++;
1910 if (indirect_calls_count)
1911 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1912 indirect_calls_count);
1913 }
1914
1915
1916 /* Dump call graph node NODE to stderr. */
1917
1918 DEBUG_FUNCTION void
1919 debug_cgraph_node (struct cgraph_node *node)
1920 {
1921 dump_cgraph_node (stderr, node);
1922 }
1923
1924
1925 /* Dump the callgraph to file F. */
1926
1927 void
1928 dump_cgraph (FILE *f)
1929 {
1930 struct cgraph_node *node;
1931
1932 fprintf (f, "callgraph:\n\n");
1933 for (node = cgraph_nodes; node; node = node->next)
1934 dump_cgraph_node (f, node);
1935 }
1936
1937
1938 /* Dump the call graph to stderr. */
1939
1940 DEBUG_FUNCTION void
1941 debug_cgraph (void)
1942 {
1943 dump_cgraph (stderr);
1944 }
1945
1946
1947 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
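/* Besides setting the name, this keeps assembler_name_hash consistent:
   when DECL is a function with a cgraph node, the stale hash entry for the
   old assembler name is removed before the new name is installed, and the
   node is re-inserted under the new name at the end.  */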
1948
1949 void
1950 change_decl_assembler_name (tree decl, tree name)
1951 {
1952 struct cgraph_node *node;
1953 void **slot;
1954 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1955 SET_DECL_ASSEMBLER_NAME (decl, name);
1956 else
1957 {
1958 if (name == DECL_ASSEMBLER_NAME (decl))
1959 return;
1960
1961 if (assembler_name_hash
1962 && TREE_CODE (decl) == FUNCTION_DECL
1963 && (node = cgraph_get_node (decl)) != NULL)
1964 {
1965 tree old_name = DECL_ASSEMBLER_NAME (decl);
1966 slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
1967 decl_assembler_name_hash (old_name),
1968 NO_INSERT);
1969 /* Inline clones are not hashed. */
1970 if (slot && *slot == node)
1971 htab_clear_slot (assembler_name_hash, slot);
1972 }
1973 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1974 && DECL_RTL_SET_P (decl))
1975 warning (0, "%D renamed after being referenced in assembly", decl);
1976
1977 SET_DECL_ASSEMBLER_NAME (decl, name);
1978 }
1979 if (assembler_name_hash
1980 && TREE_CODE (decl) == FUNCTION_DECL
1981 && (node = cgraph_get_node (decl)) != NULL)
1982 {
1983 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1984 decl_assembler_name_hash (name),
1985 INSERT);
1986 gcc_assert (!*slot);
1987 *slot = node;
1988 }
1989 }
1990
1991 /* Add a top-level asm statement to the list. */
1992
1993 struct cgraph_asm_node *
1994 cgraph_add_asm_node (tree asm_str)
1995 {
1996 struct cgraph_asm_node *node;
1997
1998 node = ggc_alloc_cleared_cgraph_asm_node ();
1999 node->asm_str = asm_str;
2000 node->order = cgraph_order++;
2001 node->next = NULL;
2002 if (cgraph_asm_nodes == NULL)
2003 cgraph_asm_nodes = node;
2004 else
2005 cgraph_asm_last_node->next = node;
2006 cgraph_asm_last_node = node;
2007 return node;
2008 }
2009
2010 /* Return true when the DECL can possibly be inlined. */
2011 bool
2012 cgraph_function_possibly_inlined_p (tree decl)
2013 {
2014 if (!cgraph_global_info_ready)
2015 return !DECL_UNINLINABLE (decl);
2016 return DECL_POSSIBLY_INLINED (decl);
2017 }
2018
2019 /* Create a clone of edge E in the callgraph node N; the clone's call is represented by CALL_STMT. */
2020 struct cgraph_edge *
2021 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2022 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2023 int freq_scale, bool update_original)
2024 {
2025 struct cgraph_edge *new_edge;
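/* COUNT_SCALE is a fraction scaled by REG_BR_PROB_BASE (see the
   computation in cgraph_clone_node), so the cloned edge receives
   e->count * count_scale / REG_BR_PROB_BASE executions.  */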
2026 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2027 gcov_type freq;
2028
2029 /* We do not want to ignore loop nest after frequency drops to 0. */
2030 if (!freq_scale)
2031 freq_scale = 1;
2032 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2033 if (freq > CGRAPH_FREQ_MAX)
2034 freq = CGRAPH_FREQ_MAX;
2035
2036 if (e->indirect_unknown_callee)
2037 {
2038 tree decl;
2039
2040 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2041 {
2042 struct cgraph_node *callee = cgraph_get_node (decl);
2043 gcc_checking_assert (callee);
2044 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
2045 }
2046 else
2047 {
2048 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2049 e->indirect_info->ecf_flags,
2050 count, freq);
2051 *new_edge->indirect_info = *e->indirect_info;
2052 }
2053 }
2054 else
2055 {
2056 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
2057 if (e->indirect_info)
2058 {
2059 new_edge->indirect_info
2060 = ggc_alloc_cleared_cgraph_indirect_call_info ();
2061 *new_edge->indirect_info = *e->indirect_info;
2062 }
2063 }
2064
2065 new_edge->inline_failed = e->inline_failed;
2066 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2067 new_edge->lto_stmt_uid = stmt_uid;
2068 /* Manually clone the flags that depend on call_stmt availability. */
2069 new_edge->can_throw_external = e->can_throw_external;
2070 new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
2071 if (update_original)
2072 {
2073 e->count -= new_edge->count;
2074 if (e->count < 0)
2075 e->count = 0;
2076 }
2077 cgraph_call_edge_duplication_hooks (e, new_edge);
2078 return new_edge;
2079 }
2080
2081
2082 /* Create a node representing a clone of N executed COUNT times.  Decrease
2083 the execution counts of the original node too.
2084 The new clone will have its decl set to DECL, which may or may not be
2085 the same as the decl of N.
2086
2087 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2088 function's profile to reflect the fact that part of the execution is now
2089 handled by the clone.
2090 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
2091 the new clone.  Otherwise the caller is responsible for doing so later. */
2092
2093 struct cgraph_node *
2094 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2095 bool update_original,
2096 VEC(cgraph_edge_p,heap) *redirect_callers,
2097 bool call_duplication_hook)
2098 {
2099 struct cgraph_node *new_node = cgraph_create_node_1 ();
2100 struct cgraph_edge *e;
2101 gcov_type count_scale;
2102 unsigned i;
2103
2104 new_node->decl = decl;
2105 new_node->origin = n->origin;
2106 if (new_node->origin)
2107 {
2108 new_node->next_nested = new_node->origin->nested;
2109 new_node->origin->nested = new_node;
2110 }
2111 new_node->analyzed = n->analyzed;
2112 new_node->local = n->local;
2113 new_node->local.externally_visible = false;
2114 new_node->local.local = true;
2115 new_node->global = n->global;
2116 new_node->rtl = n->rtl;
2117 new_node->count = count;
2118 new_node->frequency = n->frequency;
2119 new_node->clone = n->clone;
2120 new_node->clone.tree_map = 0;
2121 if (n->count)
2122 {
2123 if (new_node->count > n->count)
2124 count_scale = REG_BR_PROB_BASE;
2125 else
2126 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2127 }
2128 else
2129 count_scale = 0;
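/* A worked example of the scaling above: if N executed 1000 times and the
   clone is given COUNT = 250, COUNT_SCALE becomes REG_BR_PROB_BASE / 4, so
   each cloned edge below inherits one quarter of the original edge count
   (the REG_BR_PROB_BASE factor cancels out in cgraph_clone_edge).  */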
2130 if (update_original)
2131 {
2132 n->count -= count;
2133 if (n->count < 0)
2134 n->count = 0;
2135 }
2136
2137 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2138 {
2139 /* Redirect calls to the old version node to point to its new
2140 version. */
2141 cgraph_redirect_edge_callee (e, new_node);
2142 }
2143
2144
2145 for (e = n->callees;e; e=e->next_callee)
2146 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2147 count_scale, freq, update_original);
2148
2149 for (e = n->indirect_calls; e; e = e->next_callee)
2150 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2151 count_scale, freq, update_original);
2152 ipa_clone_references (new_node, NULL, &n->ref_list);
2153
2154 new_node->next_sibling_clone = n->clones;
2155 if (n->clones)
2156 n->clones->prev_sibling_clone = new_node;
2157 n->clones = new_node;
2158 new_node->clone_of = n;
2159
2160 if (n->decl != decl)
2161 {
2162 struct cgraph_node **slot;
2163 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2164 gcc_assert (!*slot);
2165 *slot = new_node;
2166 if (assembler_name_hash)
2167 {
2168 void **aslot;
2169 tree name = DECL_ASSEMBLER_NAME (decl);
2170
2171 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2172 decl_assembler_name_hash (name),
2173 INSERT);
2174 gcc_assert (!*aslot);
2175 *aslot = new_node;
2176 }
2177 }
2178
2179 if (call_duplication_hook)
2180 cgraph_call_node_duplication_hooks (n, new_node);
2181 return new_node;
2182 }
2183
2184 /* Create a new name for a clone of DECL by appending SUFFIX.  Returns an identifier. */
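/* For instance, cloning "foo" with SUFFIX "constprop" typically yields an
   identifier along the lines of "foo.constprop.0"; the exact separator
   ('.', '$' or '_') and the trailing number come from the target macros
   used below, so the precise spelling is target dependent.  */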
2185
2186 static GTY(()) unsigned int clone_fn_id_num;
2187
2188 tree
2189 clone_function_name (tree decl, const char *suffix)
2190 {
2191 tree name = DECL_ASSEMBLER_NAME (decl);
2192 size_t len = IDENTIFIER_LENGTH (name);
2193 char *tmp_name, *prefix;
2194
2195 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2196 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2197 strcpy (prefix + len + 1, suffix);
2198 #ifndef NO_DOT_IN_LABEL
2199 prefix[len] = '.';
2200 #elif !defined NO_DOLLAR_IN_LABEL
2201 prefix[len] = '$';
2202 #else
2203 prefix[len] = '_';
2204 #endif
2205 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2206 return get_identifier (tmp_name);
2207 }
2208
2209 /* Create a callgraph node clone with a new declaration.  The actual body
2210 will be copied later, at the compilation stage.
2211
2212 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
2213 bitmap interface.
2214 */
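/* Typical users of this interface are the IPA passes that specialize
   functions for known contexts -- for instance constant propagation builds
   TREE_MAP entries replacing parameters known to be constant and may use
   ARGS_TO_SKIP to drop them from the signature.  This is an observation
   about callers rather than something enforced here.  */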
2215 struct cgraph_node *
2216 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2217 VEC(cgraph_edge_p,heap) *redirect_callers,
2218 VEC(ipa_replace_map_p,gc) *tree_map,
2219 bitmap args_to_skip,
2220 const char * suffix)
2221 {
2222 tree old_decl = old_node->decl;
2223 struct cgraph_node *new_node = NULL;
2224 tree new_decl;
2225 size_t i;
2226 struct ipa_replace_map *map;
2227
2228 if (!flag_wpa)
2229 gcc_checking_assert (tree_versionable_function_p (old_decl));
2230
2231 gcc_assert (old_node->local.can_change_signature || !args_to_skip);
2232
2233 /* Make a new FUNCTION_DECL tree node */
2234 if (!args_to_skip)
2235 new_decl = copy_node (old_decl);
2236 else
2237 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2238 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2239
2240 /* Generate a new name for the new version. */
2241 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2242 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2243 SET_DECL_RTL (new_decl, NULL);
2244
2245 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2246 CGRAPH_FREQ_BASE, false,
2247 redirect_callers, false);
2248 /* Update the properties.
2249 Make the clone visible only within this translation unit.  Make sure
2250 it is not weak either.
2251 ??? We cannot use COMDAT linkage because there is no
2252 ABI support for this. */
2253 DECL_EXTERNAL (new_node->decl) = 0;
2254 if (DECL_ONE_ONLY (old_decl))
2255 DECL_SECTION_NAME (new_node->decl) = NULL;
2256 DECL_COMDAT_GROUP (new_node->decl) = 0;
2257 TREE_PUBLIC (new_node->decl) = 0;
2258 DECL_COMDAT (new_node->decl) = 0;
2259 DECL_WEAK (new_node->decl) = 0;
2260 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
2261 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
2262 new_node->clone.tree_map = tree_map;
2263 new_node->clone.args_to_skip = args_to_skip;
2264 FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
2265 {
2266 tree var = map->new_tree;
2267
2268 STRIP_NOPS (var);
2269 if (TREE_CODE (var) != ADDR_EXPR)
2270 continue;
2271 var = get_base_var (var);
2272 if (!var)
2273 continue;
2274
2275 /* Record references of the future statement initializing the constant
2276 argument. */
2277 if (TREE_CODE (var) == FUNCTION_DECL)
2278 {
2279 struct cgraph_node *ref_node = cgraph_get_node (var);
2280 gcc_checking_assert (ref_node);
2281 ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
2282 NULL);
2283 }
2284 else if (TREE_CODE (var) == VAR_DECL)
2285 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2286 IPA_REF_ADDR, NULL);
2287 }
2288 if (!args_to_skip)
2289 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2290 else if (old_node->clone.combined_args_to_skip)
2291 {
2292 int newi = 0, oldi = 0;
2293 tree arg;
2294 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2295 struct cgraph_node *orig_node;
2296 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2297 ;
2298 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2299 {
2300 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2301 {
2302 bitmap_set_bit (new_args_to_skip, oldi);
2303 continue;
2304 }
2305 if (bitmap_bit_p (args_to_skip, newi))
2306 bitmap_set_bit (new_args_to_skip, oldi);
2307 newi++;
2308 }
2309 new_node->clone.combined_args_to_skip = new_args_to_skip;
2310 }
2311 else
2312 new_node->clone.combined_args_to_skip = args_to_skip;
2313 new_node->local.externally_visible = 0;
2314 new_node->local.local = 1;
2315 new_node->lowered = true;
2316 new_node->reachable = true;
2317
2318 cgraph_call_node_duplication_hooks (old_node, new_node);
2319
2320
2321 return new_node;
2322 }
2323
2324 /* NODE is no longer a nested function; update the cgraph accordingly. */
2325 void
2326 cgraph_unnest_node (struct cgraph_node *node)
2327 {
2328 struct cgraph_node **node2 = &node->origin->nested;
2329 gcc_assert (node->origin);
2330
2331 while (*node2 != node)
2332 node2 = &(*node2)->next_nested;
2333 *node2 = node->next_nested;
2334 node->origin = NULL;
2335 }
2336
2337 /* Return function availability. See cgraph.h for description of individual
2338 return values. */
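/* In short: a node with no analyzed body is NOT_AVAILABLE; a local function
   is LOCAL; a body that is not externally visible, or one declared inline,
   is AVAILABLE; a replaceable non-external definition is OVERWRITABLE; and
   everything else is AVAILABLE.  */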
2339 enum availability
2340 cgraph_function_body_availability (struct cgraph_node *node)
2341 {
2342 enum availability avail;
2343 gcc_assert (cgraph_function_flags_ready);
2344 if (!node->analyzed)
2345 avail = AVAIL_NOT_AVAILABLE;
2346 else if (node->local.local)
2347 avail = AVAIL_LOCAL;
2348 else if (!node->local.externally_visible)
2349 avail = AVAIL_AVAILABLE;
2350 /* Inline functions are safe to analyze even if their symbol can
2351 be overwritten at runtime, since there is no meaningful way to enforce
2352 sane behaviour when an inline function is replaced by a different body. */
2353 else if (DECL_DECLARED_INLINE_P (node->decl))
2354 avail = AVAIL_AVAILABLE;
2355
2356 /* If the function can be overwritten, return OVERWRITABLE. Take
2357 care at least of two notable extensions - the COMDAT functions
2358 used to share template instantiations in C++ (this is symmetric
2359 to the code in cp_cannot_inline_tree_fn and should probably be shared,
2360 with the inlinability hooks eliminated completely).
2361
2362 ??? Does the C++ one definition rule allow us to always return
2363 AVAIL_AVAILABLE here? That would be a good reason to preserve this
2364 bit. */
2365
2366 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2367 avail = AVAIL_OVERWRITABLE;
2368 else avail = AVAIL_AVAILABLE;
2369
2370 return avail;
2371 }
2372
2373 /* Add the function FNDECL to the call graph.
2374 Unlike cgraph_finalize_function, this function is intended to be used
2375 by the middle end and allows insertion of a new function at an arbitrary
2376 point of compilation.  The function can be in high, low or SSA form
2377 GIMPLE.
2378
2379 The function is assumed to be reachable and to have its address taken (so no
2380 API-breaking optimizations are performed on it).
2381
2382 The main work done by this function is to enqueue the function for later
2383 processing, avoiding the need for the passes to be re-entrant. */
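/* A minimal usage sketch (hypothetical caller): middle-end code that has
   synthesized FNDECL together with its gimple body would simply call

     cgraph_add_new_function (fndecl, true);

   with the second argument reflecting whether the body has already been
   through the lowering passes.  */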
2384
2385 void
2386 cgraph_add_new_function (tree fndecl, bool lowered)
2387 {
2388 struct cgraph_node *node;
2389 switch (cgraph_state)
2390 {
2391 case CGRAPH_STATE_CONSTRUCTION:
2392 /* Just enqueue the function to be processed at the nearest opportunity. */
2393 node = cgraph_create_node (fndecl);
2394 node->next_needed = cgraph_new_nodes;
2395 if (lowered)
2396 node->lowered = true;
2397 cgraph_new_nodes = node;
2398 break;
2399
2400 case CGRAPH_STATE_IPA:
2401 case CGRAPH_STATE_IPA_SSA:
2402 case CGRAPH_STATE_EXPANSION:
2403 /* Bring the function into finalized state and enqueue for later
2404 analyzing and compilation. */
2405 node = cgraph_get_create_node (fndecl);
2406 node->local.local = false;
2407 node->local.finalized = true;
2408 node->reachable = node->needed = true;
2409 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2410 {
2411 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2412 current_function_decl = fndecl;
2413 gimple_register_cfg_hooks ();
2414 tree_lowering_passes (fndecl);
2415 bitmap_obstack_initialize (NULL);
2416 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2417 execute_pass_list (pass_early_local_passes.pass.sub);
2418 bitmap_obstack_release (NULL);
2419 pop_cfun ();
2420 current_function_decl = NULL;
2421
2422 lowered = true;
2423 }
2424 if (lowered)
2425 node->lowered = true;
2426 node->next_needed = cgraph_new_nodes;
2427 cgraph_new_nodes = node;
2428 break;
2429
2430 case CGRAPH_STATE_FINISHED:
2431 /* At the very end of compilation we have to do all the work up
2432 to expansion. */
2433 node = cgraph_create_node (fndecl);
2434 if (lowered)
2435 node->lowered = true;
2436 cgraph_analyze_function (node);
2437 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2438 current_function_decl = fndecl;
2439 gimple_register_cfg_hooks ();
2440 bitmap_obstack_initialize (NULL);
2441 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2442 execute_pass_list (pass_early_local_passes.pass.sub);
2443 bitmap_obstack_release (NULL);
2444 tree_rest_of_compilation (fndecl);
2445 pop_cfun ();
2446 current_function_decl = NULL;
2447 break;
2448 }
2449
2450 /* Set a personality if required and we already passed EH lowering. */
2451 if (lowered
2452 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2453 == eh_personality_lang))
2454 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2455 }
2456
2457 /* Worker for cgraph_node_can_be_local_p. */
2458 static bool
2459 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2460 void *data ATTRIBUTE_UNUSED)
2461 {
2462 return !(!node->needed
2463 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2464 || !node->local.externally_visible));
2465 }
2466
2467 /* Return true if NODE can be made local without breaking the API.
2468 Extern inline functions and C++ COMDAT functions can be made local
2469 at the expense of possible code size growth if the function is used in
2470 multiple compilation units. */
2471 bool
2472 cgraph_node_can_be_local_p (struct cgraph_node *node)
2473 {
2474 return (!node->address_taken
2475 && !cgraph_for_node_and_aliases (node,
2476 cgraph_node_cannot_be_local_p_1,
2477 NULL, true));
2478 }
2479
2480 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2481 but other code such as notice_global_symbol generates rtl. */
2482 void
2483 cgraph_make_decl_local (tree decl)
2484 {
2485 rtx rtl, symbol;
2486
2487 if (TREE_CODE (decl) == VAR_DECL)
2488 DECL_COMMON (decl) = 0;
2489 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2490
2491 if (DECL_ONE_ONLY (decl) || DECL_COMDAT (decl))
2492 {
2493 /* It is possible that we are linking against a library defining the same
2494 COMDAT function. To avoid a conflict we need to rename our local copy of
2495 the function in case WHOPR partitioning decides to make it hidden in order
2496 to avoid cross-partition references. */
2497 if (flag_wpa)
2498 {
2499 const char *old_name;
2500
2501 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2502 if (TREE_CODE (decl) == FUNCTION_DECL)
2503 {
2504 struct cgraph_node *node = cgraph_get_node (decl);
2505 change_decl_assembler_name (decl,
2506 clone_function_name (decl, "local"));
2507 if (node->local.lto_file_data)
2508 lto_record_renamed_decl (node->local.lto_file_data,
2509 old_name,
2510 IDENTIFIER_POINTER
2511 (DECL_ASSEMBLER_NAME (decl)));
2512 }
2513 else if (TREE_CODE (decl) == VAR_DECL)
2514 {
2515 struct varpool_node *vnode = varpool_get_node (decl);
2516 /* change_decl_assembler_name will warn here on vtables because
2517 the C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2518 SET_DECL_ASSEMBLER_NAME (decl,
2519 clone_function_name (decl, "local"));
2520 if (vnode->lto_file_data)
2521 lto_record_renamed_decl (vnode->lto_file_data,
2522 old_name,
2523 IDENTIFIER_POINTER
2524 (DECL_ASSEMBLER_NAME (decl)));
2525 }
2526 }
2527 DECL_SECTION_NAME (decl) = 0;
2528 DECL_COMDAT (decl) = 0;
2529 }
2530 DECL_COMDAT_GROUP (decl) = 0;
2531 DECL_WEAK (decl) = 0;
2532 DECL_EXTERNAL (decl) = 0;
2533 TREE_PUBLIC (decl) = 0;
2534 if (!DECL_RTL_SET_P (decl))
2535 return;
2536
2537 /* Update rtl flags. */
2538 make_decl_rtl (decl);
2539
2540 rtl = DECL_RTL (decl);
2541 if (!MEM_P (rtl))
2542 return;
2543
2544 symbol = XEXP (rtl, 0);
2545 if (GET_CODE (symbol) != SYMBOL_REF)
2546 return;
2547
2548 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2549 }
2550
2551 /* Call CALLBACK on NODE, and on the thunks and aliases associated with NODE.
2552 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2553 skipped. */
2554
2555 bool
2556 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2557 bool (*callback) (struct cgraph_node *, void *),
2558 void *data,
2559 bool include_overwritable)
2560 {
2561 struct cgraph_edge *e;
2562 int i;
2563 struct ipa_ref *ref;
2564
2565 if (callback (node, data))
2566 return true;
2567 for (e = node->callers; e; e = e->next_caller)
2568 if (e->caller->thunk.thunk_p
2569 && (include_overwritable
2570 || cgraph_function_body_availability (e->caller)))
2571 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2572 include_overwritable))
2573 return true;
2574 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2575 if (ref->use == IPA_REF_ALIAS)
2576 {
2577 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2578 if (include_overwritable
2579 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2580 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2581 include_overwritable))
2582 return true;
2583 }
2584 return false;
2585 }
2586
2587 /* Call CALLBACK on NODE and on the aliases associated with NODE.
2588 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2589 skipped. */
2590
2591 bool
2592 cgraph_for_node_and_aliases (struct cgraph_node *node,
2593 bool (*callback) (struct cgraph_node *, void *),
2594 void *data,
2595 bool include_overwritable)
2596 {
2597 int i;
2598 struct ipa_ref *ref;
2599
2600 if (callback (node, data))
2601 return true;
2602 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2603 if (ref->use == IPA_REF_ALIAS)
2604 {
2605 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2606 if (include_overwritable
2607 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2608 if (cgraph_for_node_and_aliases (alias, callback, data,
2609 include_overwritable))
2610 return true;
2611 }
2612 return false;
2613 }
2614
2615 /* Worker to make NODE local. */
2616
2617 static bool
2618 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2619 {
2620 gcc_checking_assert (cgraph_node_can_be_local_p (node));
2621 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2622 {
2623 cgraph_make_decl_local (node->decl);
2624
2625 node->local.externally_visible = false;
2626 node->local.local = true;
2627 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2628 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2629 }
2630 return false;
2631 }
2632
2633 /* Make NODE local. */
2634
2635 void
2636 cgraph_make_node_local (struct cgraph_node *node)
2637 {
2638 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2639 NULL, true);
2640 }
2641
2642 /* Worker to set nothrow flag. */
2643
2644 static bool
2645 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2646 {
2647 struct cgraph_edge *e;
2648
2649 TREE_NOTHROW (node->decl) = data != NULL;
2650
2651 if (data != NULL)
2652 for (e = node->callers; e; e = e->next_caller)
2653 e->can_throw_external = false;
2654 return false;
2655 }
2656
2657 /* Set TREE_NOTHROW on NODE's decl and on the decls of NODE's aliases,
2658 if any, to NOTHROW. */
2659
2660 void
2661 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2662 {
2663 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2664 (void *)(size_t)nothrow, false);
2665 }
2666
2667 /* Worker to set const flag. */
2668
2669 static bool
2670 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2671 {
2672 /* Static constructors and destructors without a side effect can be
2673 optimized out. */
2674 if (data && !((size_t)data & 2))
2675 {
2676 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2677 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2678 if (DECL_STATIC_DESTRUCTOR (node->decl))
2679 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2680 }
2681 TREE_READONLY (node->decl) = data != NULL;
2682 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2683 return false;
2684 }
2685
2686 /* Set TREE_READONLY on NODE's decl and on the decls of NODE's aliases,
2687 if any, to READONLY; DECL_LOOPING_CONST_OR_PURE_P is set according to LOOPING. */
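/* READONLY and LOOPING are packed into the DATA pointer handed to the
   worker: any non-zero DATA sets TREE_READONLY, and bit 1 carries the
   LOOPING flag, decoded via (size_t) data & 2.  */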
2688
2689 void
2690 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
2691 {
2692 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2693 (void *)(size_t)(readonly + (int)looping * 2),
2694 false);
2695 }
2696
2697 /* Worker to set pure flag. */
2698
2699 static bool
2700 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2701 {
2702 /* Static constructors and destructors without a side effect can be
2703 optimized out. */
2704 if (data && !((size_t)data & 2))
2705 {
2706 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2707 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2708 if (DECL_STATIC_DESTRUCTOR (node->decl))
2709 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2710 }
2711 DECL_PURE_P (node->decl) = data != NULL;
2712 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2713 return false;
2714 }
2715
2716 /* Set DECL_PURE_P on NODE's decl and on the decls of NODE's aliases,
2717 if any, to PURE; DECL_LOOPING_CONST_OR_PURE_P is set according to LOOPING. */
2718
2719 void
2720 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
2721 {
2722 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2723 (void *)(size_t)(pure + (int)looping * 2),
2724 false);
2725 }
2726
2727 /* Data used by cgraph_propagate_frequency. */
2728
2729 struct cgraph_propagate_frequency_data
2730 {
2731 bool maybe_unlikely_executed;
2732 bool maybe_executed_once;
2733 bool only_called_at_startup;
2734 bool only_called_at_exit;
2735 };
2736
2737 /* Worker for cgraph_propagate_frequency. */
2738
2739 static bool
2740 cgraph_propagate_frequency_1 (struct cgraph_node *node, void *data)
2741 {
2742 struct cgraph_propagate_frequency_data *d;
2743 struct cgraph_edge *edge;
2744
2745 d = (struct cgraph_propagate_frequency_data *)data;
2746 for (edge = node->callers;
2747 edge && (d->maybe_unlikely_executed || d->maybe_executed_once
2748 || d->only_called_at_startup || d->only_called_at_exit);
2749 edge = edge->next_caller)
2750 {
2751 if (edge->caller != node)
2752 {
2753 d->only_called_at_startup &= edge->caller->only_called_at_startup;
2754 /* It makes sense to put main() together with the static constructors.
2755 It will be executed for sure, but the rest of the functions called from
2756 main are definitely not startup-only. */
2757 if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
2758 d->only_called_at_startup = 0;
2759 d->only_called_at_exit &= edge->caller->only_called_at_exit;
2760 }
2761 if (!edge->frequency)
2762 continue;
2763 switch (edge->caller->frequency)
2764 {
2765 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2766 break;
2767 case NODE_FREQUENCY_EXECUTED_ONCE:
2768 if (dump_file && (dump_flags & TDF_DETAILS))
2769 fprintf (dump_file, " Called by %s that is executed once\n",
2770 cgraph_node_name (edge->caller));
2771 d->maybe_unlikely_executed = false;
2772 if (inline_edge_summary (edge)->loop_depth)
2773 {
2774 d->maybe_executed_once = false;
2775 if (dump_file && (dump_flags & TDF_DETAILS))
2776 fprintf (dump_file, " Called in loop\n");
2777 }
2778 break;
2779 case NODE_FREQUENCY_HOT:
2780 case NODE_FREQUENCY_NORMAL:
2781 if (dump_file && (dump_flags & TDF_DETAILS))
2782 fprintf (dump_file, " Called by %s that is normal or hot\n",
2783 cgraph_node_name (edge->caller));
2784 d->maybe_unlikely_executed = false;
2785 d->maybe_executed_once = false;
2786 break;
2787 }
2788 }
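/* EDGE is non-NULL here only when the loop above stopped early because
   every flag in D has already been cleared; returning true then lets
   cgraph_for_node_and_aliases cut the walk over the remaining aliases
   short.  */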
2789 return edge != NULL;
2790 }
2791
2792 /* See if the frequency of NODE can be updated based on frequencies of its
2793 callers. */
2794 bool
2795 cgraph_propagate_frequency (struct cgraph_node *node)
2796 {
2797 struct cgraph_propagate_frequency_data d = {true, true, true, true};
2798 bool changed = false;
2799
2800 if (!node->local.local)
2801 return false;
2802 gcc_assert (node->analyzed);
2803 if (dump_file && (dump_flags & TDF_DETAILS))
2804 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2805
2806 cgraph_for_node_and_aliases (node, cgraph_propagate_frequency_1, &d, true);
2807
2808 if ((d.only_called_at_startup && !d.only_called_at_exit)
2809 && !node->only_called_at_startup)
2810 {
2811 node->only_called_at_startup = true;
2812 if (dump_file)
2813 fprintf (dump_file, "Node %s promoted to only called at startup.\n",
2814 cgraph_node_name (node));
2815 changed = true;
2816 }
2817 if ((d.only_called_at_exit && !d.only_called_at_startup)
2818 && !node->only_called_at_exit)
2819 {
2820 node->only_called_at_exit = true;
2821 if (dump_file)
2822 fprintf (dump_file, "Node %s promoted to only called at exit.\n",
2823 cgraph_node_name (node));
2824 changed = true;
2825 }
2826 /* These come either from profile or user hints; never update them. */
2827 if (node->frequency == NODE_FREQUENCY_HOT
2828 || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2829 return changed;
2830 if (d.maybe_unlikely_executed)
2831 {
2832 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2833 if (dump_file)
2834 fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
2835 cgraph_node_name (node));
2836 changed = true;
2837 }
2838 else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2839 {
2840 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2841 if (dump_file)
2842 fprintf (dump_file, "Node %s promoted to executed once.\n",
2843 cgraph_node_name (node));
2844 changed = true;
2845 }
2846 return changed;
2847 }
2848
2849 /* Return true when NODE cannot return or throw and thus
2850 it is safe to ignore its side effects for IPA analysis. */
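/* Without -fexceptions, ECF_NORETURN alone is enough; with exceptions
   enabled the function must also be ECF_NOTHROW, since a call that throws
   can still hand control back to the caller through an exception edge.  */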
2851
2852 bool
2853 cgraph_node_cannot_return (struct cgraph_node *node)
2854 {
2855 int flags = flags_from_decl_or_type (node->decl);
2856 if (!flag_exceptions)
2857 return (flags & ECF_NORETURN) != 0;
2858 else
2859 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2860 == (ECF_NORETURN | ECF_NOTHROW));
2861 }
2862
2863 /* Return true when the call E cannot lead to a return from the caller
2864 and thus it is safe to ignore its side effects for IPA analysis
2865 when computing the side effects of the caller.
2866 FIXME: We could actually mark all edges that have no reaching
2867 path to EXIT_BLOCK_PTR or that throw, to get better results. */
2868 bool
2869 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
2870 {
2871 if (cgraph_node_cannot_return (e->caller))
2872 return true;
2873 if (e->indirect_unknown_callee)
2874 {
2875 int flags = e->indirect_info->ecf_flags;
2876 if (!flag_exceptions)
2877 return (flags & ECF_NORETURN) != 0;
2878 else
2879 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2880 == (ECF_NORETURN | ECF_NOTHROW));
2881 }
2882 else
2883 return cgraph_node_cannot_return (e->callee);
2884 }
2885
2886 /* Return true when function NODE can be removed from callgraph
2887 if all direct calls are eliminated. */
2888
2889 bool
2890 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2891 {
2892 gcc_assert (!node->global.inlined_to);
2893 /* Extern inlines can always go, we will use the external definition. */
2894 if (DECL_EXTERNAL (node->decl))
2895 return true;
2896 /* When the function is needed, we cannot remove it. */
2897 if (node->needed || node->reachable_from_other_partition)
2898 return false;
2899 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2900 || DECL_STATIC_DESTRUCTOR (node->decl))
2901 return false;
2902 /* Only COMDAT functions can be removed if externally visible. */
2903 if (node->local.externally_visible
2904 && (!DECL_COMDAT (node->decl)
2905 || cgraph_used_from_object_file_p (node)))
2906 return false;
2907 return true;
2908 }
2909
2910 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2911
2912 static bool
2913 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2914 {
2915 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2916 }
2917
2918 /* Return true when function NODE and its aliases can be removed from callgraph
2919 if all direct calls are eliminated. */
2920
2921 bool
2922 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2923 {
2924 /* Extern inlines can always go, we will use the external definition. */
2925 if (DECL_EXTERNAL (node->decl))
2926 return true;
2927 if (node->address_taken)
2928 return false;
2929 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2930 }
2931
2932 /* Worker for cgraph_will_be_removed_from_program_if_no_direct_calls. */
2933
2934 static bool
2935 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2936 {
2937 return cgraph_used_from_object_file_p (node);
2938 }
2939
2940 /* Return true when function NODE can be expected to be removed
2941 from the program when direct calls in this compilation unit are removed.
2942
2943 As a special case, COMDAT functions are
2944 cgraph_can_remove_if_no_direct_calls_p while they are not
2945 cgraph_only_called_directly_p (it is possible they are called from another
2946 unit).
2947
2948 This function behaves as cgraph_only_called_directly_p because eliminating
2949 all uses of a COMDAT function does not necessarily make it disappear from
2950 the program unless we are compiling the whole program or we do LTO. In that
2951 case we know we win, since dynamic linking will not really discard the
2952 linkonce section. */
2953
2954 bool
2955 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2956 {
2957 gcc_assert (!node->global.inlined_to);
2958 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2959 return false;
2960 if (!in_lto_p && !flag_whole_program)
2961 return cgraph_only_called_directly_p (node);
2962 else
2963 {
2964 if (DECL_EXTERNAL (node->decl))
2965 return true;
2966 return cgraph_can_remove_if_no_direct_calls_p (node);
2967 }
2968 }
2969
2970 /* Return true when RESOLUTION indicates that the linker will use
2971 the symbol from non-LTO object files. */
2972
2973 bool
2974 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2975 {
2976 return (resolution == LDPR_PREVAILING_DEF
2977 || resolution == LDPR_PREEMPTED_REG
2978 || resolution == LDPR_RESOLVED_EXEC
2979 || resolution == LDPR_RESOLVED_DYN);
2980 }
2981
2982
2983 /* Return true when NODE is known to be used from other (non-LTO) object file.
2984 Known only when doing LTO via linker plugin. */
2985
2986 bool
2987 cgraph_used_from_object_file_p (struct cgraph_node *node)
2988 {
2989 gcc_assert (!node->global.inlined_to);
2990 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
2991 return false;
2992 if (resolution_used_from_other_file_p (node->resolution))
2993 return true;
2994 return false;
2995 }
2996
2997 /* Worker for cgraph_only_called_directly_p. */
2998
2999 static bool
3000 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3001 {
3002 return !cgraph_only_called_directly_or_aliased_p (node);
3003 }
3004
3005 /* Return true when function NODE and all its aliases are only called
3006 directly.
3007 I.e. it is not externally visible, its address was not taken and
3008 it is not used in any other non-standard way. */
3009
3010 bool
3011 cgraph_only_called_directly_p (struct cgraph_node *node)
3012 {
3013 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
3014 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
3015 NULL, true);
3016 }
3017
3018
3019 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3020
3021 static bool
3022 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
3023 {
3024 VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
3025 struct cgraph_edge *cs;
3026 enum availability avail;
3027 cgraph_function_or_thunk_node (node, &avail);
3028
3029 if (avail > AVAIL_OVERWRITABLE)
3030 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3031 if (!cs->indirect_inlining_edge)
3032 VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
3033 return false;
3034 }
3035
3036 /* Collect all callers of NODE and its aliases that are known to lead to NODE
3037 (i.e. are not overwritable). */
3038
3039 VEC (cgraph_edge_p, heap) *
3040 collect_callers_of_node (struct cgraph_node *node)
3041 {
3042 VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
3043 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
3044 &redirect_callers, false);
3045 return redirect_callers;
3046 }
3047
3048 #include "gt-cgraph.h"