1 /* Callgraph handling code.
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "profile-count.h"
25 #include "ipa-ref.h"
26 #include "plugin-api.h"
27
28 class ipa_opt_pass_d;
29 typedef ipa_opt_pass_d *ipa_opt_pass;
30
31 /* Symbol table consists of functions and variables.
32 TODO: add labels and CONST_DECLs. */
33 enum symtab_type
34 {
35 SYMTAB_SYMBOL,
36 SYMTAB_FUNCTION,
37 SYMTAB_VARIABLE
38 };
39
40 /* Section names are stored as reference counted strings in GGC safe hashtable
41 (to make them survive through PCH). */
42
43 struct GTY((for_user)) section_hash_entry
44 {
45 int ref_count;
46 char *name; /* As long as this data structure stays in GGC, we cannot put
47 the string at the tail of the structure, or GGC dies in a
48 horrible way. */
49 };
50
51 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
52 {
53 typedef const char *compare_type;
54
55 static hashval_t hash (section_hash_entry *);
56 static bool equal (section_hash_entry *, const char *);
57 };
58
59 enum availability
60 {
61 /* Not yet set by cgraph_function_body_availability. */
62 AVAIL_UNSET,
63 /* Function body/variable initializer is unknown. */
64 AVAIL_NOT_AVAILABLE,
65 /* Function body/variable initializer is known but might be replaced
66 by a different one from other compilation unit and thus needs to
67 be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
68 arbitrary side effects on escaping variables and functions, while
69 like AVAILABLE it might access static variables. */
70 AVAIL_INTERPOSABLE,
71 /* Function body/variable initializer is known and will be used in final
72 program. */
73 AVAIL_AVAILABLE,
74 /* Function body/variable initializer is known and all its uses are
75 explicitly visible within current unit (i.e. its address is never taken
76 and it is not exported to other units). Currently used only for functions. */
77 AVAIL_LOCAL
78 };
79
80 /* Classification of symbols WRT partitioning. */
81 enum symbol_partitioning_class
82 {
83 /* External declarations are ignored by partitioning algorithms and they are
84 added into the boundary later via compute_ltrans_boundary. */
85 SYMBOL_EXTERNAL,
86 /* Partitioned symbols are put into one of the partitions. */
87 SYMBOL_PARTITION,
88 /* Duplicated symbols (such as comdat or constant pool references) are
89 copied into every node needing them via add_symbol_to_partition. */
90 SYMBOL_DUPLICATE
91 };
92
93 /* Base of all entries in the symbol table.
94 The symtab_node is inherited by cgraph and varpool nodes. */
95 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
96 chain_next ("%h.next"), chain_prev ("%h.previous")))
97 symtab_node
98 {
99 public:
100 /* Return name. */
101 const char *name () const;
102
103 /* Return dump name. */
104 const char *dump_name () const;
105
106 /* Return asm name. */
107 const char *asm_name () const;
108
109 /* Return dump name with assembler name. */
110 const char *dump_asm_name () const;
111
112 /* Add node into symbol table. This function is not used directly, but via
113 cgraph/varpool node creation routines. */
114 void register_symbol (void);
115
116 /* Remove symbol from symbol table. */
117 void remove (void);
118
119 /* Dump symtab node to F. */
120 void dump (FILE *f);
121
122 /* Dump symtab node to stderr. */
123 void DEBUG_FUNCTION debug (void);
124
125 /* Verify consistency of node. */
126 void DEBUG_FUNCTION verify (void);
127
128 /* Return ipa reference from this symtab_node to
129 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
130 of the use. */
131 ipa_ref *create_reference (symtab_node *referred_node,
132 enum ipa_ref_use use_type);
133
134 /* Return ipa reference from this symtab_node to
135 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
136 of the use and STMT the statement (if it exists). */
137 ipa_ref *create_reference (symtab_node *referred_node,
138 enum ipa_ref_use use_type, gimple *stmt);
139
140 /* If VAL is a reference to a function or a variable, add a reference from
141 this symtab_node to the corresponding symbol table node. Return the new
142 reference or NULL if none was created. */
143 ipa_ref *maybe_create_reference (tree val, gimple *stmt);
144
145 /* Clone all references from symtab NODE to this symtab_node. */
146 void clone_references (symtab_node *node);
147
148 /* Clone all referring entries from symtab NODE to this symtab_node. */
152 void clone_referring (symtab_node *node);
153
154 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
155 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
156
157 /* Find the structure describing a reference to REFERRED_NODE
158 and associated with statement STMT. */
159 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
160 unsigned int lto_stmt_uid);
161
162 /* Remove all references that are associated with statement STMT. */
163 void remove_stmt_references (gimple *stmt);
164
165 /* Remove all stmt references in non-speculative references.
166 Those are not maintained during inlining & cloning.
167 The exception are speculative references that are updated along
168 with callgraph edges associated with them. */
169 void clear_stmts_in_references (void);
170
171 /* Remove all references in ref list. */
172 void remove_all_references (void);
173
174 /* Remove all referring items in ref list. */
175 void remove_all_referring (void);
176
177 /* Dump references in ref list to FILE. */
178 void dump_references (FILE *file);
179
180 /* Dump referring in list to FILE. */
181 void dump_referring (FILE *);
182
183 /* Get number of references for this node. */
184 inline unsigned num_references (void)
185 {
186 return ref_list.references ? ref_list.references->length () : 0;
187 }
188
189 /* Iterates I-th reference in the list, REF is also set. */
190 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
191
192 /* Iterates I-th referring item in the list, REF is also set. */
193 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
194
195 /* Iterates I-th referring alias item in the list, REF is also set. */
196 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
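/* Illustrative sketch (not part of the original header): the usual pattern for
   walking the reference list uses iterate_reference in a for loop. The node
   pointer and the note_address_reference helper are hypothetical.

     ipa_ref *ref = NULL;
     for (unsigned i = 0; node->iterate_reference (i, ref); i++)
       if (ref->use == IPA_REF_ADDR)
         note_address_reference (ref->referred);
*/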
197
198 /* Return true if symtab node and TARGET represent
199 semantically equivalent symbols. */
200 bool semantically_equivalent_p (symtab_node *target);
201
202 /* Classify symbol symtab node for partitioning. */
203 enum symbol_partitioning_class get_partitioning_class (void);
204
205 /* Return comdat group. */
206 tree get_comdat_group ()
207 {
208 return x_comdat_group;
209 }
210
211 /* Return comdat group as identifier_node. */
212 tree get_comdat_group_id ()
213 {
214 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
215 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
216 return x_comdat_group;
217 }
218
219 /* Set comdat group. */
220 void set_comdat_group (tree group)
221 {
222 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
223 || DECL_P (group));
224 x_comdat_group = group;
225 }
226
227 /* Return section as string. */
228 const char * get_section ()
229 {
230 if (!x_section)
231 return NULL;
232 return x_section->name;
233 }
234
235 /* Remove node from same comdat group. */
236 void remove_from_same_comdat_group (void);
237
238 /* Add this symtab_node to the same comdat group that OLD is in. */
239 void add_to_same_comdat_group (symtab_node *old_node);
240
241 /* Dissolve the same_comdat_group list in which NODE resides. */
242 void dissolve_same_comdat_group_list (void);
243
244 /* Return true when symtab_node is known to be used from other (non-LTO)
245 object file. Known only when doing LTO via linker plugin. */
246 bool used_from_object_file_p (void);
247
248 /* Walk the alias chain to return the symbol NODE is alias of.
249 If NODE is not an alias, return NODE.
250 When AVAILABILITY is non-NULL, get minimal availability in the chain.
251 When REF is non-NULL, assume that reference happens in symbol REF
252 when determining the availability. */
253 symtab_node *ultimate_alias_target (enum availability *avail = NULL,
254 struct symtab_node *ref = NULL);
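/* Illustrative sketch (not part of the original header): resolving an alias
   chain and checking the resulting availability. The node pointer and the
   use_definition helper are hypothetical.

     enum availability avail;
     symtab_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       use_definition (target);
*/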
255
256 /* Return next reachable static symbol with initializer after NODE. */
257 inline symtab_node *next_defined_symbol (void);
258
259 /* Add reference recording that symtab node is alias of TARGET.
260 If TRANSPARENT is true make the alias a transparent alias.
261 The function can fail in the case of aliasing cycles; in this case
262 it returns false. */
263 bool resolve_alias (symtab_node *target, bool transparent = false);
264
265 /* The C++ FE sometimes changes linkage flags after producing
266 same-body aliases. */
267 void fixup_same_cpp_alias_visibility (symtab_node *target);
268
269 /* Call callback on symtab node and aliases associated to this node.
270 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
271 skipped. */
272 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
273 void *data,
274 bool include_overwrite);
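/* Illustrative sketch (not part of the original header): a callback passed to
   call_for_symbol_and_aliases returns true to stop the walk early. The
   callback name and its body are hypothetical.

     static bool
     note_symbol (symtab_node *n, void *data ATTRIBUTE_UNUSED)
     {
       n->aux = (void *) 1;
       return false;  /* Keep walking the aliases.  */
     }

     node->call_for_symbol_and_aliases (note_symbol, NULL, true);
*/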
275
276 /* If the node cannot be interposed by the static or dynamic linker to point
277 to a different definition, return this symbol. Otherwise look for an alias
278 with such a property and if none exists, introduce a new one. */
279 symtab_node *noninterposable_alias (void);
280
281 /* Return node that alias is aliasing. */
282 inline symtab_node *get_alias_target (void);
283
284 /* Set section for symbol and its aliases. */
285 void set_section (const char *section);
286
287 /* Set section, do not recurse into aliases.
288 When one wants to change section of symbol and its aliases,
289 use set_section. */
290 void set_section_for_node (const char *section);
291
292 /* Set initialization priority to PRIORITY. */
293 void set_init_priority (priority_type priority);
294
295 /* Return the initialization priority. */
296 priority_type get_init_priority ();
297
298 /* Return availability of NODE when referenced from REF. */
299 enum availability get_availability (symtab_node *ref = NULL);
300
301 /* Return true if NODE binds to current definition in final executable
302 when referenced from REF. If REF is NULL return conservative value
303 for any reference. */
304 bool binds_to_current_def_p (symtab_node *ref = NULL);
305
306 /* Make DECL local. */
307 void make_decl_local (void);
308
309 /* Copy visibility from N. */
310 void copy_visibility_from (symtab_node *n);
311
312 /* Return desired alignment of the definition. This is NOT alignment useful
313 to access THIS, because THIS may be interposable and DECL_ALIGN should
314 be used instead. It must, however, be guaranteed when outputting the
315 definition of THIS. */
316 unsigned int definition_alignment ();
317
318 /* Return true if alignment can be increased. */
319 bool can_increase_alignment_p ();
320
321 /* Increase alignment of symbol to ALIGN. */
322 void increase_alignment (unsigned int align);
323
324 /* Return true if list contains an alias. */
325 bool has_aliases_p (void);
326
327 /* Return true when the symbol is a real symbol, i.e. it is not an inline
328 clone or an abstract function kept for debug info purposes only. */
329 bool real_symbol_p (void);
330
331 /* Determine if symbol declaration is needed. That is, it is visible to
332 something outside this translation unit or to something magic in the
333 system configury. This function is used just during symbol creation. */
334 bool needed_p (void);
335
336 /* Return true if this symbol is a function from the C frontend specified
337 directly in RTL form (with "__RTL"). */
338 bool native_rtl_p () const;
339
340 /* Return true when there are references to the node. */
341 bool referred_to_p (bool include_self = true);
342
343 /* Return true if symbol can be discarded by linker from the binary.
344 Assume that symbol is used (so there is no need to take into account
345 garbage collecting linkers).
346
347 This can happen for comdats, commons and weaks when they are prevailed
348 over by another definition at static linking time. */
349 inline bool
350 can_be_discarded_p (void)
351 {
352 return (DECL_EXTERNAL (decl)
353 || ((get_comdat_group ()
354 || DECL_COMMON (decl)
355 || (DECL_SECTION_NAME (decl) && DECL_WEAK (decl)))
356 && ((resolution != LDPR_PREVAILING_DEF
357 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP)
358 || flag_incremental_link)
359 && resolution != LDPR_PREVAILING_DEF_IRONLY));
360 }
361
362 /* Return true if NODE is local to a particular COMDAT group, and must not
363 be named from outside the COMDAT. This is used for C++ decloned
364 constructors. */
365 inline bool comdat_local_p (void)
366 {
367 return (same_comdat_group && !TREE_PUBLIC (decl));
368 }
369
370 /* Return true if ONE and TWO are part of the same COMDAT group. */
371 inline bool in_same_comdat_group_p (symtab_node *target);
372
373 /* Return true if symbol is known to be nonzero. */
374 bool nonzero_address ();
375
376 /* Return 0 if symbol is known to have different address than S2,
377 Return 1 if symbol is known to have same address as S2,
378 return 2 otherwise.
379
380 If MEMORY_ACCESSED is true, assume that the memory pointed to by both THIS
381 and S2 is going to be accessed. This eliminates the situations when
382 either THIS or S2 is NULL and is useful for comparing bases when deciding
383 about memory aliasing. */
384 int equal_address_to (symtab_node *s2, bool memory_accessed = false);
385
386 /* Return true if symbol's address may possibly be compared to other
387 symbol's address. */
388 bool address_matters_p ();
389
390 /* Return true if NODE's address can be compared. This uses properties
391 of NODE only and does not look if the address is actually taken in
392 interesting way. For that use ADDRESS_MATTERS_P instead. */
393 bool address_can_be_compared_p (void);
394
395 /* Return symbol table node associated with DECL, if any,
396 and NULL otherwise. */
397 static inline symtab_node *get (const_tree decl)
398 {
399 /* Check that we are called for sane type of object - functions
400 and static or external variables. */
401 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
402 || (TREE_CODE (decl) == VAR_DECL
403 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
404 || in_lto_p)));
405 /* Check that the mapping is sane - perhaps this check can go away,
406 but at the moment frontends tend to corrupt the mapping by calling
407 memcpy/memset on the tree nodes. */
408 gcc_checking_assert (!decl->decl_with_vis.symtab_node
409 || decl->decl_with_vis.symtab_node->decl == decl);
410 return decl->decl_with_vis.symtab_node;
411 }
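/* Illustrative sketch (not part of the original header): mapping a DECL back
   to its symbol table entry. DECL is a hypothetical FUNCTION_DECL or
   static/external VAR_DECL; the node may be NULL if none was registered.

     symtab_node *snode = symtab_node::get (decl);
     if (snode && snode->definition)
       {
         /* DECL is defined in the current unit.  */
       }
*/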
412
413 /* Try to find a symtab node for declaration DECL and if it does not
414 exist or if it corresponds to an inline clone, create a new one. */
415 static inline symtab_node * get_create (tree node);
416
417 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
418 Return NULL if there's no such node. */
419 static symtab_node *get_for_asmname (const_tree asmname);
420
421 /* Verify symbol table for internal consistency. */
422 static DEBUG_FUNCTION void verify_symtab_nodes (void);
423
424 /* Perform internal consistency checks, if they are enabled. */
425 static inline void checking_verify_symtab_nodes (void);
426
427 /* Type of the symbol. */
428 ENUM_BITFIELD (symtab_type) type : 8;
429
430 /* The symbols resolution. */
431 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
432
433 /*** Flags representing the symbol type. ***/
434
435 /* True when symbol corresponds to a definition in current unit.
436 set via finalize_function or finalize_decl */
437 unsigned definition : 1;
438 /* True when symbol is an alias.
439 Set by assemble_alias. */
440 unsigned alias : 1;
441 /* When true the alias is translated into its target symbol either by GCC
442 or assembler (it also may just be a duplicate declaration of the same
443 linker name).
444
445 Currently transparent aliases come in three different flavors
446 - aliases having the same assembler name as their target (aka duplicated
447 declarations). In this case the assembler names compare via
448 assembler_names_equal_p and weakref is false
449 - aliases that are renamed at the time they are output to the final file
450 by varasm.c. For those DECL_ASSEMBLER_NAME have
451 IDENTIFIER_TRANSPARENT_ALIAS set and thus also their assembler
452 name must be unique.
453 Weakrefs belong to this category when we target an assembler without
454 .weakref directive.
455 - weakrefs that are renamed by assembler via .weakref directive.
456 In this case the alias may or may not be definition (depending if
457 target declaration was seen by the compiler), weakref is set.
458 Unless we are before renaming statics, assembler names are different.
459
460 Given that we now support duplicate declarations, the second option is
461 redundant and will be removed. */
462 unsigned transparent_alias : 1;
463 /* True when alias is a weakref. */
464 unsigned weakref : 1;
465 /* The C++ frontend produces same-body aliases and extra name aliases for
466 virtual functions and vtables that are obviously equivalent.
467 Those aliases are a bit special, especially because the C++ frontend
468 visibility code is so ugly it cannot get them right the first time
469 and their visibility needs to be copied from their "masters" at
470 the end of parsing. */
471 unsigned cpp_implicit_alias : 1;
472 /* Set once the definition was analyzed. The list of references and
473 other properties are built during analysis. */
474 unsigned analyzed : 1;
475 /* Set for write-only variables. */
476 unsigned writeonly : 1;
477 /* Visibility of symbol was used for further optimization; do not
478 permit further changes. */
479 unsigned refuse_visibility_changes : 1;
480
481 /*** Visibility and linkage flags. ***/
482
483 /* Set when function is visible by other units. */
484 unsigned externally_visible : 1;
485 /* Don't reorder to other symbols having this set. */
486 unsigned no_reorder : 1;
487 /* The symbol will be assumed to be used in an invisible way (like
488 by a toplevel asm statement). */
489 unsigned force_output : 1;
490 /* Like FORCE_OUTPUT, but in the case it is the ABI requiring the symbol to
491 be exported. Unlike FORCE_OUTPUT this flag gets cleared for symbols
492 promoted to static and it does not inhibit optimization. */
493 unsigned forced_by_abi : 1;
494 /* True when the name is known to be unique and thus it does not need mangling. */
495 unsigned unique_name : 1;
496 /* Specify whether the section was set by user or by
497 compiler via -ffunction-sections. */
498 unsigned implicit_section : 1;
499 /* True when body and other characteristics have been removed by
500 symtab_remove_unreachable_nodes. */
501 unsigned body_removed : 1;
502
503 /*** WHOPR Partitioning flags.
504 These flags are used at ltrans stage when only part of the callgraph is
505 available. ***/
506
507 /* Set when variable is used from other LTRANS partition. */
508 unsigned used_from_other_partition : 1;
509 /* Set when function is available in the other LTRANS partition.
510 During WPA output it is used to mark nodes that are present in
511 multiple partitions. */
512 unsigned in_other_partition : 1;
513
514
515
516 /*** other flags. ***/
517
518 /* Set when symbol has address taken. */
519 unsigned address_taken : 1;
520 /* Set when init priority is set. */
521 unsigned in_init_priority_hash : 1;
522
523 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
524 of offloading, for separate compilation for a different target. */
525 unsigned need_lto_streaming : 1;
526
527 /* Set when symbol can be streamed into bytecode for offloading. */
528 unsigned offloadable : 1;
529
530
531 /* Ordering of all symtab entries. */
532 int order;
533
534 /* Declaration representing the symbol. */
535 tree decl;
536
537 /* Linked list of symbol table entries starting with symtab_nodes. */
538 symtab_node *next;
539 symtab_node *previous;
540
541 /* Linked list of symbols with the same asm name. There may be multiple
542 entries for single symbol name during LTO, because symbols are renamed
543 only after partitioning.
544
545 Because inline clones are kept in the assembler name hash, they also produce
546 duplicate entries.
547
548 There are also several long standing bugs where frontends and builtin
549 code produce duplicated decls. */
550 symtab_node *next_sharing_asm_name;
551 symtab_node *previous_sharing_asm_name;
552
553 /* Circular list of nodes in the same comdat group if non-NULL. */
554 symtab_node *same_comdat_group;
555
556 /* Vectors of referring and referenced entities. */
557 ipa_ref_list ref_list;
558
559 /* Alias target. May be either DECL pointer or ASSEMBLER_NAME pointer
560 depending on what was known to the frontend at creation time.
561 Once the alias is resolved, this pointer becomes NULL. */
562 tree alias_target;
563
564 /* File stream where this node is being written to. */
565 struct lto_file_decl_data * lto_file_data;
566
567 PTR GTY ((skip)) aux;
568
569 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
570 tree x_comdat_group;
571
572 /* Section name. Again can be private, if allowed. */
573 section_hash_entry *x_section;
574
575 protected:
576 /* Dump base fields of symtab nodes to F. Not to be used directly. */
577 void dump_base (FILE *);
578
579 /* Verify common part of symtab node. */
580 bool DEBUG_FUNCTION verify_base (void);
581
582 /* Remove node from symbol table. This function is not used directly, but via
583 cgraph/varpool node removal routines. */
584 void unregister (void);
585
586 /* Return the initialization and finalization priority information for
587 DECL. If there is no previous priority information, a freshly
588 allocated structure is returned. */
589 struct symbol_priority_map *priority_info (void);
590
591 /* Worker for call_for_symbol_and_aliases_1. */
592 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
593 void *data,
594 bool include_overwrite);
595 private:
596 /* Worker for set_section. */
597 static bool set_section (symtab_node *n, void *s);
598
599 /* Worker for symtab_resolve_alias. */
600 static bool set_implicit_section (symtab_node *n, void *);
601
602 /* Worker searching noninterposable alias. */
603 static bool noninterposable_alias (symtab_node *node, void *data);
604
605 /* Worker for ultimate_alias_target. */
606 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
607 symtab_node *ref = NULL);
608
609 /* Get dump name with normal or assembly name. */
610 const char *get_dump_name (bool asm_name_p) const;
611 };
612
613 inline void
614 symtab_node::checking_verify_symtab_nodes (void)
615 {
616 if (flag_checking)
617 symtab_node::verify_symtab_nodes ();
618 }
619
620 /* Walk all aliases for NODE. */
621 #define FOR_EACH_ALIAS(node, alias) \
622 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
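/* Illustrative sketch (not part of the original header): FOR_EACH_ALIAS
   visits the direct aliases of NODE; each ipa_ref's referring symbol is the
   alias itself. NODE and the dump helper are hypothetical.

     ipa_ref *alias;
     FOR_EACH_ALIAS (node, alias)
       dump_alias (dump_file, alias->referring);
*/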
623
624 /* This is the information that is put into the cgraph local structure
625 to recover a function. */
626 struct lto_file_decl_data;
627
628 extern const char * const cgraph_availability_names[];
629 extern const char * const ld_plugin_symbol_resolution_names[];
630 extern const char * const tls_model_names[];
631
632 /* Sub-structure of cgraph_node. Holds information about thunk, used only for
633 same body aliases.
634
635 Thunks are basically wrappers around methods which are introduced in case
636 of multiple inheritance in order to adjust the value of the "this" pointer
637 or of the returned value.
638
639 In the case of this-adjusting thunks, each back-end can override the
640 can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
641 (with a tail call for instance) directly as assembly. For the default hook
642 or for the case where the can_output_mi_thunk hooks return false, the thunk
643 is gimplified and lowered using the regular machinery. */
644
645 struct GTY(()) cgraph_thunk_info {
646 /* Offset used to adjust "this". */
647 HOST_WIDE_INT fixed_offset;
648
649 /* Offset in the virtual table to get the offset to adjust "this". Valid iff
650 VIRTUAL_OFFSET_P is true. */
651 HOST_WIDE_INT virtual_value;
652
653 /* Thunk target, i.e. the method that this thunk wraps. Depending on the
654 TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
655 tree alias;
656
657 /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
658 thunk. */
659 bool this_adjusting;
660
661 /* If true, this thunk is what we call a virtual thunk. In this case:
662 * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
663 done, add to the result the offset found in the vtable at:
664 vptr + VIRTUAL_VALUE
665 * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
666 the virtual one. */
667 bool virtual_offset_p;
668
669 /* ??? True for special kind of thunks, seems related to instrumentation. */
670 bool add_pointer_bounds_args;
671
672 /* Set to true when alias node (the cgraph_node to which this struct belongs)
673 is a thunk. Access to any other fields is invalid if this is false. */
674 bool thunk_p;
675 };
676
677 /* Information about the function collected locally.
678 Available after function is analyzed. */
679
680 struct GTY(()) cgraph_local_info {
681 /* Set when function is visible in current compilation unit only and
682 its address is never taken. */
683 unsigned local : 1;
684
685 /* False when there is something that makes versioning impossible. */
686 unsigned versionable : 1;
687
688 /* False when function calling convention and signature cannot be changed.
689 This is the case when __builtin_apply_args is used. */
690 unsigned can_change_signature : 1;
691
692 /* True when the function has been originally extern inline, but it is
693 redefined now. */
694 unsigned redefined_extern_inline : 1;
695
696 /* True if the function may enter serial irrevocable mode. */
697 unsigned tm_may_enter_irr : 1;
698 };
699
700 /* Information about the function that needs to be computed globally
701 once compilation is finished. Available only with -funit-at-a-time. */
702
703 struct GTY(()) cgraph_global_info {
704 /* For inline clones this points to the function they will be
705 inlined into. */
706 cgraph_node *inlined_to;
707 };
708
709 /* Represent which DECL tree (or reference to such tree)
710 will be replaced by another tree while versioning. */
711 struct GTY(()) ipa_replace_map
712 {
713 /* The tree that will be replaced. */
714 tree old_tree;
715 /* The new (replacing) tree. */
716 tree new_tree;
717 /* Parameter number to replace, when old_tree is NULL. */
718 int parm_num;
719 /* True when a substitution should be done, false otherwise. */
720 bool replace_p;
721 /* True when we replace a reference to old_tree. */
722 bool ref_p;
723 };
724
725 struct GTY(()) cgraph_clone_info
726 {
727 vec<ipa_replace_map *, va_gc> *tree_map;
728 bitmap args_to_skip;
729 bitmap combined_args_to_skip;
730 };
731
732 enum cgraph_simd_clone_arg_type
733 {
734 SIMD_CLONE_ARG_TYPE_VECTOR,
735 SIMD_CLONE_ARG_TYPE_UNIFORM,
736 /* These are only for integer/pointer arguments passed by value. */
737 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
738 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
739 /* These 6 are only for reference type arguments or arguments passed
740 by reference. */
741 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
742 SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP,
743 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
744 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP,
745 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
746 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP,
747 SIMD_CLONE_ARG_TYPE_MASK
748 };
749
750 /* Function arguments in the original function of a SIMD clone.
751 Supplementary data for `struct simd_clone'. */
752
753 struct GTY(()) cgraph_simd_clone_arg {
754 /* Original function argument as it originally existed in
755 DECL_ARGUMENTS. */
756 tree orig_arg;
757
758 /* orig_arg's function (or for extern functions type from
759 TYPE_ARG_TYPES). */
760 tree orig_type;
761
762 /* If argument is a vector, this holds the vector version of
763 orig_arg that after adjusting the argument types will live in
764 DECL_ARGUMENTS. Otherwise, this is NULL.
765
766 This basically holds:
767 vector(simdlen) __typeof__(orig_arg) new_arg. */
768 tree vector_arg;
769
770 /* vector_arg's type (or for extern functions the new vector type). */
771 tree vector_type;
772
773 /* If argument is a vector, this holds the array where the simd
774 argument is held while executing the simd clone function. This
775 is a local variable in the cloned function. Its content is
776 copied from vector_arg upon entry to the clone.
777
778 This basically holds:
779 __typeof__(orig_arg) simd_array[simdlen]. */
780 tree simd_array;
781
782 /* A SIMD clone's argument can be either linear (constant or
783 variable), uniform, or vector. */
784 enum cgraph_simd_clone_arg_type arg_type;
785
786 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
787 the constant linear step, if arg_type is
788 SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of
789 the uniform argument holding the step, otherwise 0. */
790 HOST_WIDE_INT linear_step;
791
792 /* Variable alignment if available, otherwise 0. */
793 unsigned int alignment;
794 };
795
796 /* Specific data for a SIMD function clone. */
797
798 struct GTY(()) cgraph_simd_clone {
799 /* Number of words in the SIMD lane associated with this clone. */
800 unsigned int simdlen;
801
802 /* Number of annotated function arguments in `args'. This is
803 usually the number of named arguments in FNDECL. */
804 unsigned int nargs;
805
806 /* Max hardware vector size in bits for integral vectors. */
807 unsigned int vecsize_int;
808
809 /* Max hardware vector size in bits for floating point vectors. */
810 unsigned int vecsize_float;
811
812 /* Machine mode of the mask argument(s), if they are to be passed
813 as bitmasks in integer argument(s). VOIDmode if masks are passed
814 as vectors of characteristic type. */
815 machine_mode mask_mode;
816
817 /* The mangling character for a given vector size. This is used
818 to determine the ISA mangling bit as specified in the Intel
819 Vector ABI. */
820 unsigned char vecsize_mangle;
821
822 /* True if this is the masked, in-branch version of the clone,
823 otherwise false. */
824 unsigned int inbranch : 1;
825
826 /* True if this is a Cilk Plus variant. */
827 unsigned int cilk_elemental : 1;
828
829 /* Doubly linked list of SIMD clones. */
830 cgraph_node *prev_clone, *next_clone;
831
832 /* Original cgraph node the SIMD clones were created for. */
833 cgraph_node *origin;
834
835 /* Annotated function arguments for the original function. */
836 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
837 };
838
839 /* Function Multiversioning info. */
840 struct GTY((for_user)) cgraph_function_version_info {
841 /* The cgraph_node for which the function version info is stored. */
842 cgraph_node *this_node;
843 /* Chains all the semantically identical function versions. The
844 first function in this chain is the version_info node of the
845 default function. */
846 cgraph_function_version_info *prev;
847 /* If this version node corresponds to a dispatcher for function
848 versions, this points to the version info node of the default
849 function, the first node in the chain. */
850 cgraph_function_version_info *next;
851 /* If this node corresponds to a function version, this points
852 to the dispatcher function decl, which is the function that must
853 be called to execute the right function version at run-time.
854
855 If this cgraph node is a dispatcher (if dispatcher_function is
856 true, in the cgraph_node struct) for function versions, this
857 points to resolver function, which holds the function body of the
858 dispatcher. The dispatcher decl is an alias to the resolver
859 function decl. */
860 tree dispatcher_resolver;
861 };
862
863 #define DEFCIFCODE(code, type, string) CIF_ ## code,
864 /* Reasons for inlining failures. */
865
866 enum cgraph_inline_failed_t {
867 #include "cif-code.def"
868 CIF_N_REASONS
869 };
870
871 enum cgraph_inline_failed_type_t
872 {
873 CIF_FINAL_NORMAL = 0,
874 CIF_FINAL_ERROR
875 };
876
877 struct cgraph_edge;
878
879 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
880 {
881 typedef gimple *compare_type;
882
883 static hashval_t hash (cgraph_edge *);
884 static hashval_t hash (gimple *);
885 static bool equal (cgraph_edge *, gimple *);
886 };
887
888 /* The cgraph data structure.
889 Each function decl has assigned cgraph_node listing callees and callers. */
890
891 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
892 public:
893 /* Remove the node from cgraph and all inline clones inlined into it.
894 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
895 removed. This allows the function to be called from an outer loop walking
896 the clone tree. */
897 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
898
899 /* Record all references from cgraph_node that are taken
900 in statement STMT. */
901 void record_stmt_references (gimple *stmt);
902
903 /* Like cgraph_set_call_stmt but walk the clone tree and update all
904 clones sharing the same function body.
905 When UPDATE_SPECULATIVE is true, all three components of
906 a speculative edge get updated. Otherwise we update only the direct
907 call. */
908 void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
909 bool update_speculative = true);
910
911 /* Walk the alias chain to return the function cgraph_node is alias of.
912 Walk through thunk, too.
913 When AVAILABILITY is non-NULL, get minimal availability in the chain.
914 When REF is non-NULL, assume that reference happens in symbol REF
915 when determining the availability. */
916 cgraph_node *function_symbol (enum availability *avail = NULL,
917 struct symtab_node *ref = NULL);
918
919 /* Walk the alias chain to return the function cgraph_node is alias of.
920 Walk through non virtual thunks, too. Thus we return either a function
921 or a virtual thunk node.
922 When AVAILABILITY is non-NULL, get minimal availability in the chain.
923 When REF is non-NULL, assume that reference happens in symbol REF
924 when determining the availability. */
925 cgraph_node *function_or_virtual_thunk_symbol
926 (enum availability *avail = NULL,
927 struct symtab_node *ref = NULL);
928
929 /* Create node representing clone of N executed COUNT times. Decrease
930 the execution counts from original node too.
931 The new clone will have decl set to DECL that may or may not be the same
932 as decl of N.
933
934 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
935 function's profile to reflect the fact that part of execution is handled
936 by node.
937 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
938 the new clone. Otherwise the caller is responsible for doing so later.
939
940 If the new node is being inlined into another one, NEW_INLINED_TO should be
941 the outline function the new one is (even indirectly) inlined to.
942 All hooks will see this in node's global.inlined_to, when invoked.
943 Can be NULL if the node is not inlined. SUFFIX is string that is appended
944 to the original name. */
945 cgraph_node *create_clone (tree decl, profile_count count, int freq,
946 bool update_original,
947 vec<cgraph_edge *> redirect_callers,
948 bool call_duplication_hook,
949 cgraph_node *new_inlined_to,
950 bitmap args_to_skip, const char *suffix = NULL);
951
952 /* Create callgraph node clone with new declaration. The actual body will
953 be copied later at compilation stage. */
954 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
955 vec<ipa_replace_map *, va_gc> *tree_map,
956 bitmap args_to_skip, const char * suffix);
957
958 /* cgraph node being removed from symbol table; see if its entry can be
959 replaced by other inline clone. */
960 cgraph_node *find_replacement (void);
961
962 /* Create a new cgraph node which is the new version of
963 callgraph node. REDIRECT_CALLERS holds the callers
964 edges which should be redirected to point to
965 NEW_VERSION. ALL the callees edges of the node
966 are cloned to the new version node. Return the new
967 version node.
968
969 If non-NULL, BBS_TO_COPY determines which basic blocks
970 are copied to prevent duplication of calls that are dead
971 in the clone.
972
973 SUFFIX is a string that is appended to the original name. */
974
975 cgraph_node *create_version_clone (tree new_decl,
976 vec<cgraph_edge *> redirect_callers,
977 bitmap bbs_to_copy,
978 const char *suffix = NULL);
979
980 /* Perform function versioning.
981 Function versioning includes copying of the tree and
982 a callgraph update (creating a new cgraph node and updating
983 its callees and callers).
984
985 REDIRECT_CALLERS varray includes the edges to be redirected
986 to the new version.
987
988 TREE_MAP is a mapping of tree nodes we want to replace with
989 new ones (according to results of prior analysis).
990
991 If non-NULL, ARGS_TO_SKIP determines function parameters to remove
992 from the new version.
993 If SKIP_RETURN is true, the new version will return void.
994 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
995 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
996
997 Return the new version's cgraph node. */
998 cgraph_node *create_version_clone_with_body
999 (vec<cgraph_edge *> redirect_callers,
1000 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
1001 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
1002 const char *clone_name);
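/* Illustrative sketch (not part of the original header): a minimal call to
   the versioning entry point that redirects all known callers to the new
   body, keeps the signature unchanged, and uses an arbitrary suffix. NODE
   is a hypothetical cgraph_node pointer.

     vec<cgraph_edge *> callers = node->collect_callers ();
     cgraph_node *new_version
       = node->create_version_clone_with_body (callers, NULL, NULL, false,
                                               NULL, NULL, "example");
     callers.release ();
*/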
1003
1004 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
1005 corresponding to cgraph_node. */
1006 cgraph_function_version_info *insert_new_function_version (void);
1007
1008 /* Get the cgraph_function_version_info node corresponding to node. */
1009 cgraph_function_version_info *function_version (void);
1010
1011 /* Discover all functions and variables that are trivially needed, analyze
1012 them as well as all functions and variables referred to by them. */
1013 void analyze (void);
1014
1015 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
1016 aliases DECL with adjustments made to the first parameter.
1017 See comments in struct cgraph_thunk_info for details on the parameters. */
1018 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
1019 HOST_WIDE_INT fixed_offset,
1020 HOST_WIDE_INT virtual_value,
1021 tree virtual_offset,
1022 tree real_alias);
1023
1024
1025 /* Return node that alias is aliasing. */
1026 inline cgraph_node *get_alias_target (void);
1027
1028 /* Given function symbol, walk the alias chain to return the function
1029 the node is an alias of. Do not walk through thunks.
1030 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1031 When REF is non-NULL, assume that reference happens in symbol REF
1032 when determining the availability. */
1033
1034 cgraph_node *ultimate_alias_target (availability *availability = NULL,
1035 symtab_node *ref = NULL);
1036
1037 /* Expand thunk NODE to gimple if possible.
1038 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1039 no assembler is produced.
1040 When OUTPUT_ASM_THUNK is true, also produce assembler for
1041 thunks that are not lowered. */
1042 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
1043
1044 /* Call expand_thunk on all callers that are thunks and analyze those
1045 nodes that were expanded. */
1046 void expand_all_artificial_thunks ();
1047
1048 /* Assemble thunks and aliases associated to node. */
1049 void assemble_thunks_and_aliases (void);
1050
1051 /* Expand function specified by node. */
1052 void expand (void);
1053
1054 /* As a GCC extension we allow redefinition of the function. The
1055 semantics when both copies of bodies differ is not well defined.
1056 We replace the old body with the new body so in unit-at-a-time mode
1057 we always use the new body, while in normal mode we may end up with
1058 the old body inlined into some functions and the new body expanded and
1059 inlined in others. */
1060 void reset (void);
1061
1062 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
1063 kind of wrapper method. */
1064 void create_wrapper (cgraph_node *target);
1065
1066 /* Verify cgraph nodes of the cgraph node. */
1067 void DEBUG_FUNCTION verify_node (void);
1068
1069 /* Remove function from symbol table. */
1070 void remove (void);
1071
1072 /* Dump call graph node to file F. */
1073 void dump (FILE *f);
1074
1075 /* Dump call graph node to stderr. */
1076 void DEBUG_FUNCTION debug (void);
1077
1078 /* When doing LTO, read cgraph_node's body from disk if it is not already
1079 present. */
1080 bool get_untransformed_body (void);
1081
1082 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
1083 if it is not already present. When some IPA transformations are scheduled,
1084 apply them. */
1085 bool get_body (void);
1086
1087 /* Release memory used to represent body of function.
1088 Use this only for functions that are released before being translated to
1089 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1090 are free'd in final.c via free_after_compilation(). */
1091 void release_body (bool keep_arguments = false);
1092
1093 /* Return the DECL_STRUCT_FUNCTION of the function. */
1094 struct function *get_fun (void);
1095
1096 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
1097 void unnest (void);
1098
1099 /* Bring cgraph node local. */
1100 void make_local (void);
1101
1102 /* Likewise indicate that a node is having address taken. */
1103 void mark_address_taken (void);
1104
1105 /* Set finalization priority to PRIORITY. */
1106 void set_fini_priority (priority_type priority);
1107
1108 /* Return the finalization priority. */
1109 priority_type get_fini_priority (void);
1110
1111 /* Create edge from a given function to CALLEE in the cgraph. */
1112 cgraph_edge *create_edge (cgraph_node *callee,
1113 gcall *call_stmt, profile_count count,
1114 int freq);
1115
1116 /* Create an indirect edge with a yet-undetermined callee where the call
1117 statement destination is a formal parameter of the caller with index
1118 PARAM_INDEX. */
1119 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1120 profile_count count, int freq,
1121 bool compute_indirect_info = true);
1122
1123 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
1124 the same function body. If clones already have an edge for OLD_STMT, only
1125 update the edge the same way as cgraph_set_call_stmt_including_clones does. */
1126 void create_edge_including_clones (cgraph_node *callee,
1127 gimple *old_stmt, gcall *stmt,
1128 profile_count count,
1129 int freq,
1130 cgraph_inline_failed_t reason);
1131
1132 /* Return the callgraph edge representing the GIMPLE_CALL statement
1133 CALL_STMT. */
1134 cgraph_edge *get_edge (gimple *call_stmt);
1135
1136 /* Collect all callers of cgraph_node and its aliases that are known to lead
1137 to NODE (i.e. are not overwritable) and that are not thunks. */
1138 vec<cgraph_edge *> collect_callers (void);
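/* Illustrative sketch (not part of the original header): the vector returned
   by collect_callers is typically released by the caller once processed.
   NODE and the per-edge helper note_caller are hypothetical.

     cgraph_edge *e;
     unsigned i;
     vec<cgraph_edge *> callers = node->collect_callers ();
     FOR_EACH_VEC_ELT (callers, i, e)
       note_caller (e->caller);
     callers.release ();
*/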
1139
1140 /* Remove all callers from the node. */
1141 void remove_callers (void);
1142
1143 /* Remove all callees from the node. */
1144 void remove_callees (void);
1145
1146 /* Return function availability. See cgraph.h for description of individual
1147 return values. */
1148 enum availability get_availability (symtab_node *ref = NULL);
1149
1150 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1151 if any to NOTHROW. */
1152 bool set_nothrow_flag (bool nothrow);
1153
1154 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
1155 If SET_CONST is false, clear the flag.
1156
1157 When setting the flag be careful about possible interposition and
1158 do not set the flag for functions that can be interposed; set the pure
1159 flag for functions that can bind to another definition.
1160
1161 Return true if any change was done. */
1162
1163 bool set_const_flag (bool set_const, bool looping);
1164
1165 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1166 if any to PURE.
1167
1168 When setting the flag, be careful about possible interposition.
1169 Return true if any change was done. */
1170
1171 bool set_pure_flag (bool pure, bool looping);
1172
1173 /* Call callback on function and aliases associated to the function.
1174 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1175 skipped. */
1176
1177 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1178 void *),
1179 void *data, bool include_overwritable);
1180
1181 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1182 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1183 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1184 skipped. */
1185 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1186 void *data),
1187 void *data,
1188 bool include_overwritable,
1189 bool exclude_virtual_thunks = false);
1190
1191 /* Likewise indicate that a node is needed, i.e. reachable via some
1192 external means. */
1193 inline void mark_force_output (void);
1194
1195 /* Return true when function can be marked local. */
1196 bool local_p (void);
1197
1198 /* Return true if cgraph_node can be made local for API change.
1199 Extern inline functions and C++ COMDAT functions can be made local
1200 at the expense of possible code size growth if function is used in multiple
1201 compilation units. */
1202 bool can_be_local_p (void);
1203
1204 /* Return true when cgraph_node cannot return or throw and thus
1205 it is safe to ignore its side effects for IPA analysis. */
1206 bool cannot_return_p (void);
1207
1208 /* Return true when function cgraph_node and all its aliases are only called
1209 directly, i.e. it is not externally visible, its address was not taken and
1211 it is not used in any other non-standard way. */
1212 bool only_called_directly_p (void);
1213
1214 /* Return true when function is only called directly or it has an alias,
1215 i.e. it is not externally visible, its address was not taken and
1216 it is not used in any other non-standard way. */
1217 inline bool only_called_directly_or_aliased_p (void);
1218
1219 /* Return true when function cgraph_node can be expected to be removed
1220 from program when direct calls in this compilation unit are removed.
1221
1222 As a special case COMDAT functions are
1223 cgraph_can_remove_if_no_direct_calls_p while they are not
1224 cgraph_only_called_directly_p (it is possible they are called from another
1225 unit).
1226
1227 This function behaves as cgraph_only_called_directly_p because eliminating
1228 all uses of a COMDAT function does not make it necessarily disappear from
1229 the program unless we are compiling the whole program or we do LTO. In this
1230 case we know we win since dynamic linking will not really discard the
1231 linkonce section.
1232
1233 If WILL_INLINE is true, assume that function will be inlined into all the
1234 direct calls. */
1235 bool will_be_removed_from_program_if_no_direct_calls_p
1236 (bool will_inline = false);
1237
1238 /* Return true when function can be removed from callgraph
1239 if all direct calls and references are eliminated. The function does
1240 not take into account comdat groups. */
1241 bool can_remove_if_no_direct_calls_and_refs_p (void);
1242
1243 /* Return true when function cgraph_node and its aliases can be removed from
1244 callgraph if all direct calls are eliminated.
1245 If WILL_INLINE is true, assume that function will be inlined into all the
1246 direct calls. */
1247 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1248
1249 /* Return true when callgraph node is a function with Gimple body defined
1250 in current unit. Functions can also be defined externally or they
1251 can be thunks with no Gimple representation.
1252
1253 Note that at WPA stage, the function body may not be present in memory. */
1254 inline bool has_gimple_body_p (void);
1255
1256 /* Return true if function should be optimized for size. */
1257 bool optimize_for_size_p (void);
1258
1259 /* Dump the callgraph to file F. */
1260 static void dump_cgraph (FILE *f);
1261
1262 /* Dump the call graph to stderr. */
1263 static inline
1264 void debug_cgraph (void)
1265 {
1266 dump_cgraph (stderr);
1267 }
1268
1269 /* Record that DECL1 and DECL2 are semantically identical function
1270 versions. */
1271 static void record_function_versions (tree decl1, tree decl2);
1272
1273 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1274 DECL is a duplicate declaration. */
1275 static void delete_function_version_by_decl (tree decl);
1276
1277 /* Add the function FNDECL to the call graph.
1278 Unlike finalize_function, this function is intended to be used
1279 by the middle end and allows insertion of a new function at an arbitrary point
1280 of compilation. The function can be either in high, low or SSA form
1281 GIMPLE.
1282
1283 The function is assumed to be reachable and have address taken (so no
1284 API breaking optimizations are performed on it).
1285
1286 Main work done by this function is to enqueue the function for later
1287 processing to avoid the need for the passes to be re-entrant. */
1288 static void add_new_function (tree fndecl, bool lowered);
1289
1290 /* Return callgraph node for given symbol and check it is a function. */
1291 static inline cgraph_node *get (const_tree decl)
1292 {
1293 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1294 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1295 }
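/* Illustrative sketch (not part of the original header): looking up the
   cgraph node for a FUNCTION_DECL and walking its call edges. FNDECL and
   the note_call helper are hypothetical; the node may be NULL if the decl
   was never registered.

     cgraph_node *cnode = cgraph_node::get (fndecl);
     if (cnode)
       for (cgraph_edge *e = cnode->callees; e; e = e->next_callee)
         note_call (e->callee);
*/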
1296
1297 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1298 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1299 have the garbage collector run at the moment. We would need to either
1300 create a new GC context, or just not compile right now. */
1301 static void finalize_function (tree, bool);
1302
1303 /* Return cgraph node assigned to DECL. Create new one when needed. */
1304 static cgraph_node * create (tree decl);
1305
1306 /* Try to find a call graph node for declaration DECL and if it does not
1307 exist or if it corresponds to an inline clone, create a new one. */
1308 static cgraph_node * get_create (tree);
1309
1310 /* Return local info for the compiled function. */
1311 static cgraph_local_info *local_info (tree decl);
1312
1313 /* Return local info for the compiled function. */
1314 static struct cgraph_rtl_info *rtl_info (tree);
1315
1316 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1317 Return NULL if there's no such node. */
1318 static cgraph_node *get_for_asmname (tree asmname);
1319
1320 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1321 successful and NULL otherwise.
1322 Same body aliases are output whenever the body of DECL is output,
1323 and cgraph_node::get (ALIAS) transparently
1324 returns cgraph_node::get (DECL). */
1325 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1326
1327 /* Verify whole cgraph structure. */
1328 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1329
1330 /* Verify cgraph, if consistency checking is enabled. */
1331 static inline void checking_verify_cgraph_nodes (void);
1332
1333 /* Worker to bring NODE local. */
1334 static bool make_local (cgraph_node *node, void *);
1335
1336 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node
1337 representing the function the body is associated with
1338 (not necessarily cgraph_node (DECL)). */
1339 static cgraph_node *create_alias (tree alias, tree target);
1340
1341 /* Return true if NODE has a thunk. */
1342 static bool has_thunk_p (cgraph_node *node, void *);
1343
1344 cgraph_edge *callees;
1345 cgraph_edge *callers;
1346 /* List of edges representing indirect calls with a yet undetermined
1347 callee. */
1348 cgraph_edge *indirect_calls;
1349 /* For nested functions points to function the node is nested in. */
1350 cgraph_node *origin;
1351 /* Points to first nested function, if any. */
1352 cgraph_node *nested;
1353 /* Pointer to the next function with same origin, if any. */
1354 cgraph_node *next_nested;
1355 /* Pointer to the next clone. */
1356 cgraph_node *next_sibling_clone;
1357 cgraph_node *prev_sibling_clone;
1358 cgraph_node *clones;
1359 cgraph_node *clone_of;
1360 /* If instrumentation_clone is 1 then instrumented_version points
1361 to the original function used to make instrumented version.
1362 Otherwise points to instrumented version of the function. */
1363 cgraph_node *instrumented_version;
1364 /* If instrumentation_clone is 1 then orig_decl is the original
1365 function declaration. */
1366 tree orig_decl;
1367 /* For functions with many call sites it holds a map from call expression
1368 to the edge to speed up the cgraph_edge function. */
1369 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1370 /* Declaration this node used to be a clone of. */
1371 tree former_clone_of;
1372
1373 /* If this is a SIMD clone, this points to the SIMD specific
1374 information for it. */
1375 cgraph_simd_clone *simdclone;
1376 /* If this function has SIMD clones, this points to the first clone. */
1377 cgraph_node *simd_clones;
1378
1379 /* Interprocedural passes scheduled to have their transform functions
1380 applied next time we execute local pass on them. We maintain it
1381 per-function in order to allow IPA passes to introduce new functions. */
1382 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1383
1384 cgraph_local_info local;
1385 cgraph_global_info global;
1386 struct cgraph_rtl_info *rtl;
1387 cgraph_clone_info clone;
1388 cgraph_thunk_info thunk;
1389
1390 /* Expected number of executions: calculated in profile.c. */
1391 profile_count count;
1392 /* How to scale counts at materialization time; used to merge
1393 LTO units with different number of profile runs. */
1394 int count_materialization_scale;
1395 /* Unique id of the node. */
1396 int uid;
1397 /* Summary unique id of the node. */
1398 int summary_uid;
1399 /* ID assigned by the profiling. */
1400 unsigned int profile_id;
1401 /* Time profiler: first run of function. */
1402 int tp_first_run;
1403
1404 /* Set when decl is an abstract function pointed to by the
1405 ABSTRACT_DECL_ORIGIN of a reachable function. */
1406 unsigned used_as_abstract_origin : 1;
1407 /* Set once the function is lowered (i.e. its CFG is built). */
1408 unsigned lowered : 1;
1409 /* Set once the function has been instantiated and its callee
1410 lists created. */
1411 unsigned process : 1;
1412 /* How commonly executed the node is. Initialized during branch
1413 probabilities pass. */
1414 ENUM_BITFIELD (node_frequency) frequency : 2;
1415 /* True when function can only be called at startup (from static ctor). */
1416 unsigned only_called_at_startup : 1;
1417 /* True when function can only be called at exit (from static dtor). */
1418 unsigned only_called_at_exit : 1;
1419 /* True when function is the transactional clone of a function which
1420 is called only from inside transactions. */
1421 /* ?? We should be able to remove this. We have enough bits in
1422 cgraph to calculate it. */
1423 unsigned tm_clone : 1;
1424 /* True if this decl is a dispatcher for function versions. */
1425 unsigned dispatcher_function : 1;
1426 /* True if this decl calls a COMDAT-local function. This is set up in
1427 compute_fn_summary and inline_call. */
1428 unsigned calls_comdat_local : 1;
1429 /* True if node has been created by merge operation in IPA-ICF. */
1430 unsigned icf_merged: 1;
1431 /* True when function is clone created for Pointer Bounds Checker
1432 instrumentation. */
1433 unsigned instrumentation_clone : 1;
1434 /* True if call to node can't result in a call to free, munmap or
1435 other operation that could make previously non-trapping memory
1436 accesses trapping. */
1437 unsigned nonfreeing_fn : 1;
1438 /* True if there were multiple COMDAT bodies merged by lto-symtab. */
1439 unsigned merged_comdat : 1;
1440 /* True if function was created to be executed in parallel. */
1441 unsigned parallelized_function : 1;
1442 /* True if function is part split out by ipa-split. */
1443 unsigned split_part : 1;
1444 /* True if the function appears as possible target of indirect call. */
1445 unsigned indirect_call_target : 1;
1446
1447 private:
1448 /* Worker for call_for_symbol_and_aliases. */
1449 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1450 void *),
1451 void *data, bool include_overwritable);
1452 };
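/* Illustrative sketch (not part of this header): the CLONES, NEXT_SIBLING_CLONE
   and CLONE_OF pointers above link every node into a tree of clones rooted at
   the original function.  The helper name below is hypothetical; it only uses
   the members declared above.

     static void
     walk_clone_tree (cgraph_node *node)
     {
       for (cgraph_node *clone = node->clones;
            clone;
            clone = clone->next_sibling_clone)
         walk_clone_tree (clone);
     }
*/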
1453
1454 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1455 can appear in multiple sets. */
1456 struct cgraph_node_set_def
1457 {
1458 hash_map<cgraph_node *, size_t> *map;
1459 vec<cgraph_node *> nodes;
1460 };
1461
1462 typedef cgraph_node_set_def *cgraph_node_set;
1463 typedef struct varpool_node_set_def *varpool_node_set;
1464
1465 class varpool_node;
1466
1467 /* A varpool node set is a collection of varpool nodes. A varpool node
1468 can appear in multiple sets. */
1469 struct varpool_node_set_def
1470 {
1471 hash_map<varpool_node *, size_t> * map;
1472 vec<varpool_node *> nodes;
1473 };
1474
1475 /* Iterator structure for cgraph node sets. */
1476 struct cgraph_node_set_iterator
1477 {
1478 cgraph_node_set set;
1479 unsigned index;
1480 };
1481
1482 /* Iterator structure for varpool node sets. */
1483 struct varpool_node_set_iterator
1484 {
1485 varpool_node_set set;
1486 unsigned index;
1487 };
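/* Illustrative sketch (not part of this header): a node set is just a vector
   of nodes plus a map from node to its index, and the iterator structures pair
   a set with an index.  Assuming SET is an existing cgraph_node_set, it can be
   walked directly through the members declared above:

     for (unsigned i = 0; i < set->nodes.length (); i++)
       {
         cgraph_node *node = set->nodes[i];
         ...
       }
*/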
1488
1489 /* Context of a polymorphic call. It represents information about the type of
1490 instance that may reach the call. This is used by ipa-devirt walkers of the
1491 type inheritance graph. */
1492
1493 class GTY(()) ipa_polymorphic_call_context {
1494 public:
1495 /* The called object appears in an object of type OUTER_TYPE
1496 at offset OFFSET. When information is not 100% reliable, we
1497 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1498 HOST_WIDE_INT offset;
1499 HOST_WIDE_INT speculative_offset;
1500 tree outer_type;
1501 tree speculative_outer_type;
1502 /* True if outer object may be in construction or destruction. */
1503 unsigned maybe_in_construction : 1;
1504 /* True if outer object may be of derived type. */
1505 unsigned maybe_derived_type : 1;
1506 /* True if speculative outer object may be of derived type. We always
1507 speculate that construction does not happen. */
1508 unsigned speculative_maybe_derived_type : 1;
1509 /* True if the context is invalid and all calls should be redirected
1510 to BUILTIN_UNREACHABLE. */
1511 unsigned invalid : 1;
1512 /* True if the outer type is dynamic. */
1513 unsigned dynamic : 1;
1514
1515 /* Build empty "I know nothing" context. */
1516 ipa_polymorphic_call_context ();
1517 /* Build polymorphic call context for indirect call E. */
1518 ipa_polymorphic_call_context (cgraph_edge *e);
1519 /* Build polymorphic call context for IP invariant CST.
1520    If specified, OTR_TYPE specifies the type of polymorphic call
1521    that takes CST+OFFSET as a parameter. */
1522 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1523 HOST_WIDE_INT offset = 0);
1524 /* Build context for pointer REF contained in FNDECL at statement STMT.
1525    If INSTANCE is non-NULL, return pointer to the object described by
1526 the context. */
1527 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1528 tree *instance = NULL);
1529
1530 /* Look for vtable stores or constructor calls to work out dynamic type
1531 of memory location. */
1532 bool get_dynamic_type (tree, tree, tree, gimple *);
1533
1534 /* Make context non-speculative. */
1535 void clear_speculation ();
1536
1537 /* Produce context specifying all derived types of OTR_TYPE. If OTR_TYPE is
1538    NULL, the context is set to dummy "I know nothing" setting. */
1539 void clear_outer_type (tree otr_type = NULL);
1540
1541 /* Walk container types and modify context to point to actual class
1542 containing OTR_TYPE (if non-NULL) as base class.
1543 Return true if resulting context is valid.
1544
1545 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1546    valid only via allocation of a new polymorphic type inside it by means
1547 of placement new.
1548
1549 When CONSIDER_BASES is false, only look for actual fields, not base types
1550 of TYPE. */
1551 bool restrict_to_inner_class (tree otr_type,
1552 bool consider_placement_new = true,
1553 bool consider_bases = true);
1554
1555 /* Adjust all offsets in contexts by given number of bits. */
1556 void offset_by (HOST_WIDE_INT);
1557 /* Use when we cannot track dynamic type change. This speculatively assumes
1558    that the type change is not happening. */
1559 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1560 /* Assume that both THIS and a given context are valid and strengthen THIS
1561    if possible. Return true if any strengthening was made.
1562    If the actual type the context is being used in is known, OTR_TYPE should
1563    be set accordingly. This improves quality of the combined result. */
1564 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1565 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1566
1567 /* Return TRUE if context is fully useless. */
1568 bool useless_p () const;
1569 /* Return TRUE if this context conveys the same information as X. */
1570 bool equal_to (const ipa_polymorphic_call_context &x) const;
1571
1572 /* Dump human readable context to F. If NEWLINE is true, it will be
1573 terminated by a newline. */
1574 void dump (FILE *f, bool newline = true) const;
1575 void DEBUG_FUNCTION debug () const;
1576
1577 /* LTO streaming. */
1578 void stream_out (struct output_block *) const;
1579 void stream_in (struct lto_input_block *, struct data_in *data_in);
1580
1581 private:
1582 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1583 bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1584 void set_by_decl (tree, HOST_WIDE_INT);
1585 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1586 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1587 void make_speculative (tree otr_type = NULL);
1588 };
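/* Illustrative sketch (not part of this header): a typical use of the class
   above is to build a context for an indirect polymorphic call edge, refine it
   and dump the result.  E, OTR_TYPE and DUMP_FILE below are assumptions: an
   existing indirect cgraph_edge, the OBJ_TYPE_REF type and an open dump file.

     ipa_polymorphic_call_context ctx (e);
     ctx.restrict_to_inner_class (otr_type);
     if (!ctx.useless_p ())
       ctx.dump (dump_file);
*/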
1589
1590 /* Structure containing additional information about an indirect call. */
1591
1592 struct GTY(()) cgraph_indirect_call_info
1593 {
1594 /* When agg_contents is set, an offset where the call pointer is located
1595 within the aggregate. */
1596 HOST_WIDE_INT offset;
1597 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1598 ipa_polymorphic_call_context context;
1599 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1600 HOST_WIDE_INT otr_token;
1601 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1602 tree otr_type;
1603 /* Index of the parameter that is called. */
1604 int param_index;
1605 /* ECF flags determined from the caller. */
1606 int ecf_flags;
1607 /* Profile_id of common target obtained from profile. */
1608 int common_target_id;
1609 /* Probability that call will land in function with COMMON_TARGET_ID. */
1610 int common_target_probability;
1611
1612 /* Set when the call is a virtual call with the parameter being the
1613 associated object pointer rather than a simple direct call. */
1614 unsigned polymorphic : 1;
1615 /* Set when the call is a call of a pointer loaded from contents of an
1616 aggregate at offset. */
1617 unsigned agg_contents : 1;
1618 /* Set when this is a call through a member pointer. */
1619 unsigned member_ptr : 1;
1620 /* When the agg_contents bit is set, this one determines whether the
1621 destination is loaded from a parameter passed by reference. */
1622 unsigned by_ref : 1;
1623 /* When the agg_contents bit is set, this one determines whether we can
1624 deduce from the function body that the loaded value from the reference is
1625 never modified between the invocation of the function and the load
1626 point. */
1627 unsigned guaranteed_unmodified : 1;
1628 /* For polymorphic calls this specify whether the virtual table pointer
1629 may have changed in between function entry and the call. */
1630 unsigned vptr_changed : 1;
1631 };
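/* Illustrative sketch (not part of this header): for an indirect edge E this
   structure is reached through E->indirect_info and describes what is known
   about the call target.

     if (e->indirect_info->polymorphic)
       {
         tree otr_type = e->indirect_info->otr_type;
         HOST_WIDE_INT otr_token = e->indirect_info->otr_token;
         ...
       }
     else if (e->indirect_info->agg_contents)
       {
         HOST_WIDE_INT offset = e->indirect_info->offset;
         ...
       }
*/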
1632
1633 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1634 for_user)) cgraph_edge {
1635 friend class cgraph_node;
1636
1637 /* Remove the edge in the cgraph. */
1638 void remove (void);
1639
1640 /* Change field call_stmt of edge to NEW_STMT.
1641 If UPDATE_SPECULATIVE and E is any component of speculative
1642 edge, then update all components. */
1643 void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1644
1645 /* Redirect callee of the edge to N. The function does not update underlying
1646 call expression. */
1647 void redirect_callee (cgraph_node *n);
1648
1649 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1650 create one or more equivalent thunks for N and redirect E to the first in
1651 the chain. Note that it is then necessary to call
1652 n->expand_all_artificial_thunks once all callers are redirected. */
1653 void redirect_callee_duplicating_thunks (cgraph_node *n);
1654
1655 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1656    CALLEE. */
1659 cgraph_edge *make_direct (cgraph_node *callee);
1660
1661 /* Turn edge into speculative call calling N2. Update
1662 the profile so the direct call is taken COUNT times
1663 with FREQUENCY. */
1664 cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count,
1665 int direct_frequency);
1666
1667 /* Given speculative call edge, return all three components. */
1668 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1669 ipa_ref *&reference);
1670
1671 /* Speculative call edge turned out to be a direct call to CALLEE_DECL.
1672    Remove the speculative call sequence and return edge representing the call.
1673    It is up to caller to redirect the call as appropriate. */
1674 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1675
1676 /* If necessary, change the function declaration in the call statement
1677 associated with the edge so that it corresponds to the edge callee. */
1678 gimple *redirect_call_stmt_to_callee (void);
1679
1680 /* Create a clone of the edge in node N, represented in the callgraph
1681    by CALL_STMT. */
1682 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1683 profile_count num, profile_count den, int freq_scale,
1684 bool update_original);
1685
1686 /* Verify edge count and frequency. */
1687 bool verify_count_and_frequency ();
1688
1689 /* Return true when call of edge can not lead to return from caller
1690 and thus it is safe to ignore its side effects for IPA analysis
1691 when computing side effects of the caller. */
1692 bool cannot_lead_to_return_p (void);
1693
1694 /* Return true when the edge represents a direct recursion. */
1695 bool recursive_p (void);
1696
1697 /* Return true if the call can be hot. */
1698 bool maybe_hot_p (void);
1699
1700 /* Rebuild cgraph edges for current function node. This needs to be run after
1701 passes that don't update the cgraph. */
1702 static unsigned int rebuild_edges (void);
1703
1704 /* Rebuild cgraph references for current function node. This needs to be run
1705 after passes that don't update the cgraph. */
1706 static void rebuild_references (void);
1707
1708 /* Expected number of executions: calculated in profile.c. */
1709 profile_count count;
1710 cgraph_node *caller;
1711 cgraph_node *callee;
1712 cgraph_edge *prev_caller;
1713 cgraph_edge *next_caller;
1714 cgraph_edge *prev_callee;
1715 cgraph_edge *next_callee;
1716 gcall *call_stmt;
1717 /* Additional information about an indirect call. Not cleared when an edge
1718 becomes direct. */
1719 cgraph_indirect_call_info *indirect_info;
1720 PTR GTY ((skip (""))) aux;
1721 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1722 explanation why function was not inlined. */
1723 enum cgraph_inline_failed_t inline_failed;
1724 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1725 when the function is serialized in. */
1726 unsigned int lto_stmt_uid;
1727 /* Expected frequency of executions within the function.
1728 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1729 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1730 int frequency;
1731 /* Unique id of the edge. */
1732 int uid;
1733 /* Whether this edge was made direct by indirect inlining. */
1734 unsigned int indirect_inlining_edge : 1;
1735 /* Whether this edge describes an indirect call with an undetermined
1736 callee. */
1737 unsigned int indirect_unknown_callee : 1;
1738 /* Whether this edge is still a dangling */
1739 /* True if the corresponding CALL stmt cannot be inlined. */
1740 unsigned int call_stmt_cannot_inline_p : 1;
1741 /* Can this call throw externally? */
1742 unsigned int can_throw_external : 1;
1743 /* Edges with SPECULATIVE flag represent indirect calls that were
1744    speculatively turned into direct calls (i.e. by profile feedback).
1745 The final code sequence will have form:
1746
1747 if (call_target == expected_fn)
1748 expected_fn ();
1749 else
1750 call_target ();
1751
1752 Every speculative call is represented by three components attached
1753    to the same call statement:
1754 1) a direct call (to expected_fn)
1755 2) an indirect call (to call_target)
1756    3) an IPA_REF_ADDR reference to expected_fn.
1757
1758    Optimizers may later redirect the direct call to a clone, so 1) and 3)
1759    do not necessarily need to agree with the destination. */
1760 unsigned int speculative : 1;
1761 /* Set to true when caller is a constructor or destructor of polymorphic
1762 type. */
1763 unsigned in_polymorphic_cdtor : 1;
1764
1765 /* Return true if call must bind to current definition. */
1766 bool binds_to_current_def_p ();
1767
1768 private:
1769 /* Remove the edge from the list of the callers of the callee. */
1770 void remove_caller (void);
1771
1772 /* Remove the edge from the list of the callees of the caller. */
1773 void remove_callee (void);
1774
1775 /* Set callee N of call graph edge and add it to the corresponding set of
1776 callers. */
1777 void set_callee (cgraph_node *n);
1778
1779 /* Output flags of edge to a file F. */
1780 void dump_edge_flags (FILE *f);
1781
1782 /* Verify that call graph edge corresponds to DECL from the associated
1783 statement. Return true if the verification should fail. */
1784 bool verify_corresponds_to_fndecl (tree decl);
1785 };
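/* Illustrative sketch (not part of this header): decomposing a speculative
   edge into the three components described above and resolving the
   speculation when it is no longer considered useful.  E and KEEP below are
   assumptions: an existing edge and a flag computed by the caller.

     if (e->speculative)
       {
         cgraph_edge *direct, *indirect;
         ipa_ref *ref;
         e->speculative_call_info (direct, indirect, ref);
         if (!keep)
           e = e->resolve_speculation ();
       }
*/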
1786
1787 #define CGRAPH_FREQ_BASE 1000
1788 #define CGRAPH_FREQ_MAX 100000
1789
1790 /* The varpool data structure.
1791    Each static variable decl has a varpool_node assigned to it. */
1792
1793 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1794 public:
1795 /* Dump given varpool node to F. */
1796 void dump (FILE *f);
1797
1798 /* Dump given varpool node to stderr. */
1799 void DEBUG_FUNCTION debug (void);
1800
1801 /* Remove variable from symbol table. */
1802 void remove (void);
1803
1804 /* Remove node initializer when it is no longer needed. */
1805 void remove_initializer (void);
1806
1807 void analyze (void);
1808
1809 /* Return variable availability. */
1810 availability get_availability (symtab_node *ref = NULL);
1811
1812 /* When doing LTO, read variable's constructor from disk if
1813 it is not already present. */
1814 tree get_constructor (void);
1815
1816 /* Return true if variable has constructor that can be used for folding. */
1817 bool ctor_useable_for_folding_p (void);
1818
1819 /* For given variable pool node, walk the alias chain to return the variable
1820    it is an alias of. Do not walk through thunks.
1821 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1822 When REF is non-NULL, assume that reference happens in symbol REF
1823 when determining the availability. */
1824 inline varpool_node *ultimate_alias_target
1825 (availability *availability = NULL, symtab_node *ref = NULL);
1826
1827 /* Return node that alias is aliasing. */
1828 inline varpool_node *get_alias_target (void);
1829
1830 /* Output one variable, if necessary. Return whether we output it. */
1831 bool assemble_decl (void);
1832
1833 /* For variables in named sections make sure get_variable_section
1834 is called before we switch to those sections. Then section
1835 conflicts between read-only and read-only requiring relocations
1836 sections can be resolved. */
1837 void finalize_named_section_flags (void);
1838
1839 /* Call callback on varpool symbol and aliases associated to varpool symbol.
1840 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1841 skipped. */
1842 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1843 void *data,
1844 bool include_overwritable);
1845
1846 /* Return true when variable should be considered externally visible. */
1847 bool externally_visible_p (void);
1848
1849 /* Return true when all references to variable must be visible
1850 in ipa_ref_list.
1851 i.e. if the variable is not externally visible or not used in some magic
1852 way (asm statement or such).
1853 The magic uses are all summarized in force_output flag. */
1854 inline bool all_refs_explicit_p ();
1855
1856 /* Return true when variable can be removed from variable pool
1857    if all direct references are removed. */
1858 inline bool can_remove_if_no_refs_p (void);
1859
1860 /* Add the variable DECL to the varpool.
1861    Unlike finalize_decl, this function is intended to be used
1862    by the middle end and allows insertion of a new variable at an arbitrary
1863    point of compilation. */
1864 static void add (tree decl);
1865
1866 /* Return varpool node for given symbol and check it is a variable. */
1867 static inline varpool_node *get (const_tree decl);
1868
1869 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1870    instructs the middle end to output the variable to asm file, if needed
1871    or externally visible. */
1872 static void finalize_decl (tree decl);
1873
1874 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1875 Extra name aliases are output whenever DECL is output. */
1876 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1877
1878 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1879 Extra name aliases are output whenever DECL is output. */
1880 static varpool_node * create_alias (tree, tree);
1881
1882 /* Dump the variable pool to F. */
1883 static void dump_varpool (FILE *f);
1884
1885 /* Dump the variable pool to stderr. */
1886 static void DEBUG_FUNCTION debug_varpool (void);
1887
1888 /* Allocate new varpool node and insert it into basic data structures. */
1889 static varpool_node *create_empty (void);
1890
1891 /* Return varpool node assigned to DECL. Create new one when needed. */
1892 static varpool_node *get_create (tree decl);
1893
1894 /* Given an assembler name, lookup node. */
1895 static varpool_node *get_for_asmname (tree asmname);
1896
1897 /* Set when variable is scheduled to be assembled. */
1898 unsigned output : 1;
1899
1900 /* Set when variable has statically initialized pointer
1901    or is a static bounds variable and needs initialization. */
1902 unsigned need_bounds_init : 1;
1903
1904 /* Set if the variable is dynamically initialized, except for
1905 function local statics. */
1906 unsigned dynamically_initialized : 1;
1907
1908 ENUM_BITFIELD(tls_model) tls_model : 3;
1909
1910 /* Set if the variable is known to be used by a single function only.
1911    This is computed by the ipa_single_use pass and used by late optimizations
1912    in places where optimization would be valid for a local static variable
1913 if we did not do any inter-procedural code movement. */
1914 unsigned used_by_single_function : 1;
1915
1916 private:
1917 /* Assemble thunks and aliases associated to varpool node. */
1918 void assemble_aliases (void);
1919
1920 /* Worker for call_for_symbol_and_aliases. */
1921 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1922 void *data,
1923 bool include_overwritable);
1924 };
1925
1926 /* Every top level asm statement is put into an asm_node. */
1927
1928 struct GTY(()) asm_node {
1929
1930
1931 /* Next asm node. */
1932 asm_node *next;
1933 /* String for this asm node. */
1934 tree asm_str;
1935 /* Ordering of all cgraph nodes. */
1936 int order;
1937 };
1938
1939 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1940
1941 template <>
1942 template <>
1943 inline bool
1944 is_a_helper <cgraph_node *>::test (symtab_node *p)
1945 {
1946 return p && p->type == SYMTAB_FUNCTION;
1947 }
1948
1949 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1950
1951 template <>
1952 template <>
1953 inline bool
1954 is_a_helper <varpool_node *>::test (symtab_node *p)
1955 {
1956 return p && p->type == SYMTAB_VARIABLE;
1957 }
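/* Illustrative sketch (not part of this header): the specializations above let
   generic symtab walkers dispatch on the node kind with is_a/dyn_cast.

     if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
       ...
     else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
       ...
*/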
1958
1959 /* Macros to access the next item in the list of free cgraph nodes and
1960 edges. */
1961 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1962 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1963 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1964
1965 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1966 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1967 typedef void (*varpool_node_hook)(varpool_node *, void *);
1968 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1969 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1970
1971 struct cgraph_edge_hook_list;
1972 struct cgraph_node_hook_list;
1973 struct varpool_node_hook_list;
1974 struct cgraph_2edge_hook_list;
1975 struct cgraph_2node_hook_list;
1976
1977 /* Map from a symbol to initialization/finalization priorities. */
1978 struct GTY(()) symbol_priority_map {
1979 priority_type init;
1980 priority_type fini;
1981 };
1982
1983 enum symtab_state
1984 {
1985 /* Frontend is parsing and finalizing functions. */
1986 PARSING,
1987 /* Callgraph is being constructed. It is safe to add new functions. */
1988 CONSTRUCTION,
1989 /* Callgraph is being streamed-in at LTO time. */
1990 LTO_STREAMING,
1991 /* Callgraph is built and early IPA passes are being run. */
1992 IPA,
1993 /* Callgraph is built and all functions are transformed to SSA form. */
1994 IPA_SSA,
1995 /* All inline decisions are done; it is now possible to remove extern inline
1996 functions and virtual call targets. */
1997 IPA_SSA_AFTER_INLINING,
1998 /* Functions are now ordered and being passed to RTL expanders. */
1999 EXPANSION,
2000 /* All cgraph expansion is done. */
2001 FINISHED
2002 };
2003
2004 struct asmname_hasher : ggc_ptr_hash <symtab_node>
2005 {
2006 typedef const_tree compare_type;
2007
2008 static hashval_t hash (symtab_node *n);
2009 static bool equal (symtab_node *n, const_tree t);
2010 };
2011
2012 class GTY((tag ("SYMTAB"))) symbol_table
2013 {
2014 public:
2015 friend class symtab_node;
2016 friend class cgraph_node;
2017 friend class cgraph_edge;
2018
2019 symbol_table (): cgraph_max_summary_uid (1)
2020 {
2021 }
2022
2023 /* Initialize callgraph dump file. */
2024 void initialize (void);
2025
2026 /* Register a top-level asm statement ASM_STR. */
2027 inline asm_node *finalize_toplevel_asm (tree asm_str);
2028
2029 /* Analyze the whole compilation unit once it is parsed completely. */
2030 void finalize_compilation_unit (void);
2031
2032 /* The C++ frontend produces same body aliases all over the place, even before
2033    PCH gets streamed out. It relies on us linking the aliases with their
2034    function in order to do the fixups, but ipa-ref is not PCH safe.
2035    Consequently we first produce aliases without links, but once the C++ FE is
2036    sure it won't stream PCH we build the links via this function. */
2037 void process_same_body_aliases (void);
2038
2039 /* Perform simple optimizations based on callgraph. */
2040 void compile (void);
2041
2042 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
2043 functions into callgraph in a way so they look like ordinary reachable
2044 functions inserted into callgraph already at construction time. */
2045 void process_new_functions (void);
2046
2047 /* Once all functions from the compilation unit are in memory, produce all
2048    clones and update all calls. We might also do this on demand if we don't
2049    want to bring all functions to memory prior to compilation, but the current
2050    WHOPR implementation does that and it is a bit easier to keep everything
2051    right in this order. */
2052 void materialize_all_clones (void);
2053
2054 /* Register a symbol NODE. */
2055 inline void register_symbol (symtab_node *node);
2056
2057 inline void
2058 clear_asm_symbols (void)
2059 {
2060 asmnodes = NULL;
2061 asm_last_node = NULL;
2062 }
2063
2064 /* Perform reachability analysis and reclaim all unreachable nodes. */
2065 bool remove_unreachable_nodes (FILE *file);
2066
2067 /* Optimization of function bodies might've rendered some variables
2068    unnecessary, so we want to avoid compiling them. Re-do reachability
2069    starting from variables that are either externally visible or were
2070    referred to from the asm output routines. */
2071 void remove_unreferenced_decls (void);
2072
2073 /* Unregister a symbol NODE. */
2074 inline void unregister (symtab_node *node);
2075
2076 /* Allocate new callgraph node and insert it into basic data structures. */
2077 cgraph_node *create_empty (void);
2078
2079 /* Release a callgraph NODE with UID and put it into the list
2080    of free nodes. */
2081 void release_symbol (cgraph_node *node, int uid);
2082
2083 /* Output all variables enqueued to be assembled. */
2084 bool output_variables (void);
2085
2086 /* Weakrefs may be associated to external decls and thus not output
2087 at expansion time. Emit all necessary aliases. */
2088 void output_weakrefs (void);
2089
2090 /* Return first symbol in the symbol table. */
2091 inline symtab_node *first_symbol (void);
2092
2093 /* Return first assembler symbol. */
2094 inline asm_node *
2095 first_asm_symbol (void)
2096 {
2097 return asmnodes;
2098 }
2099
2100 /* Return first static symbol with definition. */
2101 inline symtab_node *first_defined_symbol (void);
2102
2103 /* Return first variable. */
2104 inline varpool_node *first_variable (void);
2105
2106 /* Return next variable after NODE. */
2107 inline varpool_node *next_variable (varpool_node *node);
2108
2109 /* Return first static variable with initializer. */
2110 inline varpool_node *first_static_initializer (void);
2111
2112 /* Return next static variable with initializer after NODE. */
2113 inline varpool_node *next_static_initializer (varpool_node *node);
2114
2115 /* Return first static variable with definition. */
2116 inline varpool_node *first_defined_variable (void);
2117
2118 /* Return next static variable with definition after NODE. */
2119 inline varpool_node *next_defined_variable (varpool_node *node);
2120
2121 /* Return first function with body defined. */
2122 inline cgraph_node *first_defined_function (void);
2123
2124 /* Return next function with body defined after NODE. */
2125 inline cgraph_node *next_defined_function (cgraph_node *node);
2126
2127 /* Return first function. */
2128 inline cgraph_node *first_function (void);
2129
2130 /* Return next function. */
2131 inline cgraph_node *next_function (cgraph_node *node);
2132
2133 /* Return first function with body defined. */
2134 cgraph_node *first_function_with_gimple_body (void);
2135
2136 /* Return next function with gimple body after NODE. */
2137 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2138
2139 /* Register HOOK to be called with DATA on each removed edge. */
2140 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2141 void *data);
2142
2143 /* Remove ENTRY from the list of hooks called on removing edges. */
2144 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2145
2146 /* Register HOOK to be called with DATA on each removed node. */
2147 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2148 void *data);
2149
2150 /* Remove ENTRY from the list of hooks called on removing nodes. */
2151 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2152
2153 /* Register HOOK to be called with DATA on each removed node. */
2154 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2155 void *data);
2156
2157 /* Remove ENTRY from the list of hooks called on removing nodes. */
2158 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2159
2160 /* Register HOOK to be called with DATA on each inserted node. */
2161 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2162 void *data);
2163
2164 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2165 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2166
2167 /* Register HOOK to be called with DATA on each inserted node. */
2168 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2169 void *data);
2170
2171 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2172 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2173
2174 /* Register HOOK to be called with DATA on each duplicated edge. */
2175 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2176 void *data);
2177 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2178 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2179
2180 /* Register HOOK to be called with DATA on each duplicated node. */
2181 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2182 void *data);
2183
2184 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2185 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2186
2187 /* Call all edge removal hooks. */
2188 void call_edge_removal_hooks (cgraph_edge *e);
2189
2190 /* Call all node insertion hooks. */
2191 void call_cgraph_insertion_hooks (cgraph_node *node);
2192
2193 /* Call all node removal hooks. */
2194 void call_cgraph_removal_hooks (cgraph_node *node);
2195
2196 /* Call all node duplication hooks. */
2197 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2198
2199 /* Call all edge duplication hooks. */
2200 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2201
2202 /* Call all node removal hooks. */
2203 void call_varpool_removal_hooks (varpool_node *node);
2204
2205 /* Call all node insertion hooks. */
2206 void call_varpool_insertion_hooks (varpool_node *node);
2207
2208 /* Arrange node to be first in its entry of assembler_name_hash. */
2209 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2210
2211 /* Initialize asm name hash unless it is already initialized. */
2212 void symtab_initialize_asm_name_hash (void);
2213
2214 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2215 void change_decl_assembler_name (tree decl, tree name);
2216
2217 /* Dump symbol table to F. */
2218 void dump (FILE *f);
2219
2220 /* Dump symbol table to stderr. */
2221 inline DEBUG_FUNCTION void debug (void)
2222 {
2223 dump (stderr);
2224 }
2225
2226 /* Return true if assembler names NAME1 and NAME2 lead to the same symbol
2227 name. */
2228 static bool assembler_names_equal_p (const char *name1, const char *name2);
2229
2230 int cgraph_count;
2231 int cgraph_max_uid;
2232 int cgraph_max_summary_uid;
2233
2234 int edges_count;
2235 int edges_max_uid;
2236
2237 symtab_node* GTY(()) nodes;
2238 asm_node* GTY(()) asmnodes;
2239 asm_node* GTY(()) asm_last_node;
2240 cgraph_node* GTY(()) free_nodes;
2241
2242 /* Head of a linked list of unused (freed) call graph edges.
2243    Do not GTY((delete)) this list so UIDs get reliably recycled. */
2244 cgraph_edge * GTY(()) free_edges;
2245
2246 /* The order index of the next symtab node to be created. This is
2247 used so that we can sort the cgraph nodes in order by when we saw
2248 them, to support -fno-toplevel-reorder. */
2249 int order;
2250
2251 /* Set when whole unit has been analyzed so we can access global info. */
2252 bool global_info_ready;
2253 /* What state callgraph is in right now. */
2254 enum symtab_state state;
2255 /* Set when the cgraph is fully built and the basic flags are computed. */
2256 bool function_flags_ready;
2257
2258 bool cpp_implicit_aliases_done;
2259
2260 /* Hash table used to hold sections. */
2261 hash_table<section_name_hasher> *GTY(()) section_hash;
2262
2263 /* Hash table used to convert assembler names into nodes. */
2264 hash_table<asmname_hasher> *assembler_name_hash;
2265
2266 /* Hash table used to hold init priorities. */
2267 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2268
2269 FILE* GTY ((skip)) dump_file;
2270
2271 /* Return symbol used to separate symbol name from suffix. */
2272 static char symbol_suffix_separator ();
2273
2274 FILE* GTY ((skip)) ipa_clones_dump_file;
2275
2276 hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
2277
2278 private:
2279 /* Allocate new callgraph node. */
2280 inline cgraph_node * allocate_cgraph_symbol (void);
2281
2282 /* Allocate a cgraph_edge structure and fill it with data according to the
2283 parameters of which only CALLEE can be NULL (when creating an indirect call
2284 edge). */
2285 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2286 gcall *call_stmt, profile_count count, int freq,
2287 bool indir_unknown_callee);
2288
2289 /* Put the edge onto the free list. */
2290 void free_edge (cgraph_edge *e);
2291
2292 /* Insert NODE to assembler name hash. */
2293 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2294
2295 /* Remove NODE from assembler name hash. */
2296 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2297
2298 /* Hash asmnames ignoring the user specified marks. */
2299 static hashval_t decl_assembler_name_hash (const_tree asmname);
2300
2301 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2302 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2303
2304 friend struct asmname_hasher;
2305
2306 /* List of hooks triggered when an edge is removed. */
2307 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2308 /* List of hooks triggered when a cgraph node is removed. */
2309 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2310 /* List of hooks triggered when an edge is duplicated. */
2311 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2312 /* List of hooks triggered when a node is duplicated. */
2313 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2314 /* List of hooks triggered when a function is inserted. */
2315 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2316 /* List of hooks triggered when a variable is inserted. */
2317 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2318 /* List of hooks triggered when a node is removed. */
2319 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2320 };
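/* Illustrative sketch (not part of this header): registering one of the hooks
   declared above on the global symbol table.  The callback name is
   hypothetical; keep the returned list entry so the hook can be removed again.

     static void
     note_node_removal (cgraph_node *node, void *data)
     {
       ...
     }

     cgraph_node_hook_list *removal_hook
       = symtab->add_cgraph_removal_hook (note_node_removal, NULL);
     ...
     symtab->remove_cgraph_removal_hook (removal_hook);
*/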
2321
2322 extern GTY(()) symbol_table *symtab;
2323
2324 extern vec<cgraph_node *> cgraph_new_nodes;
2325
2326 inline hashval_t
2327 asmname_hasher::hash (symtab_node *n)
2328 {
2329 return symbol_table::decl_assembler_name_hash
2330 (DECL_ASSEMBLER_NAME (n->decl));
2331 }
2332
2333 inline bool
2334 asmname_hasher::equal (symtab_node *n, const_tree t)
2335 {
2336 return symbol_table::decl_assembler_name_equal (n->decl, t);
2337 }
2338
2339 /* In cgraph.c */
2340 void cgraph_c_finalize (void);
2341 void release_function_body (tree);
2342 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2343
2344 void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2345 bool cgraph_function_possibly_inlined_p (tree);
2346
2347 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2348 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2349
2350 extern bool gimple_check_call_matching_types (gimple *, tree, bool);
2351
2352 /* In cgraphunit.c */
2353 void cgraphunit_c_finalize (void);
2354
2355 /* Initialize datastructures so DECL is a function in lowered gimple form.
2356 IN_SSA is true if the gimple is in SSA. */
2357 basic_block init_lowered_empty_function (tree, bool, profile_count);
2358
2359 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
2360 /* In cgraphclones.c */
2361
2362 tree clone_function_name_1 (const char *, const char *);
2363 tree clone_function_name (tree decl, const char *);
2364
2365 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2366 bool, bitmap, bool, bitmap, basic_block);
2367
2368 void dump_callgraph_transformation (const cgraph_node *original,
2369 const cgraph_node *clone,
2370 const char *suffix);
2371 tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
2372 bool skip_return);
2373
2374 /* In cgraphbuild.c */
2375 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2376 void record_references_in_initializer (tree, bool);
2377
2378 /* In ipa.c */
2379 void cgraph_build_static_cdtor (char which, tree body, int priority);
2380 bool ipa_discover_readonly_nonaddressable_vars (void);
2381
2382 /* In varpool.c */
2383 tree ctor_for_folding (tree);
2384
2385 /* In tree-chkp.c */
2386 extern bool chkp_function_instrumented_p (tree fndecl);
2387
2388 /* In ipa-inline-analysis.c */
2389 void initialize_inline_failed (struct cgraph_edge *);
2390 bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
2391
2392 /* Return true when the symbol is a real symbol, i.e. it is not an inline
2393    clone or an abstract function kept for debug info purposes only. */
2394 inline bool
2395 symtab_node::real_symbol_p (void)
2396 {
2397 cgraph_node *cnode;
2398
2399 if (DECL_ABSTRACT_P (decl))
2400 return false;
2401 if (transparent_alias && definition)
2402 return false;
2403 if (!is_a <cgraph_node *> (this))
2404 return true;
2405 cnode = dyn_cast <cgraph_node *> (this);
2406 if (cnode->global.inlined_to)
2407 return false;
2408 return true;
2409 }
2410
2411 /* Return true if DECL should have an entry in the symbol table if used.
2412    Those are functions and static & external variables. */
2413
2414 static inline bool
2415 decl_in_symtab_p (const_tree decl)
2416 {
2417 return (TREE_CODE (decl) == FUNCTION_DECL
2418 || (TREE_CODE (decl) == VAR_DECL
2419 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2420 }
2421
2422 inline bool
2423 symtab_node::in_same_comdat_group_p (symtab_node *target)
2424 {
2425 symtab_node *source = this;
2426
2427 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2428 {
2429 if (cn->global.inlined_to)
2430 source = cn->global.inlined_to;
2431 }
2432 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2433 {
2434 if (cn->global.inlined_to)
2435 target = cn->global.inlined_to;
2436 }
2437
2438 return source->get_comdat_group () == target->get_comdat_group ();
2439 }
2440
2441 /* Return node that alias is aliasing. */
2442
2443 inline symtab_node *
2444 symtab_node::get_alias_target (void)
2445 {
2446 ipa_ref *ref = NULL;
2447 iterate_reference (0, ref);
2448 if (ref->use == IPA_REF_CHKP)
2449 iterate_reference (1, ref);
2450 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2451 return ref->referred;
2452 }
2453
2454 /* Return next symbol with definition after the node. */
2455
2456 inline symtab_node *
2457 symtab_node::next_defined_symbol (void)
2458 {
2459 symtab_node *node1 = next;
2460
2461 for (; node1; node1 = node1->next)
2462 if (node1->definition)
2463 return node1;
2464
2465 return NULL;
2466 }
2467
2468 /* Iterates I-th reference in the list, REF is also set. */
2469
2470 inline ipa_ref *
2471 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2472 {
2473 vec_safe_iterate (ref_list.references, i, &ref);
2474
2475 return ref;
2476 }
2477
2478 /* Iterates I-th referring item in the list, REF is also set. */
2479
2480 inline ipa_ref *
2481 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2482 {
2483 ref_list.referring.iterate (i, &ref);
2484
2485 return ref;
2486 }
2487
2488 /* Iterates I-th referring alias item in the list, REF is also set. */
2489
2490 inline ipa_ref *
2491 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2492 {
2493 ref_list.referring.iterate (i, &ref);
2494
2495 if (ref && ref->use != IPA_REF_ALIAS)
2496 return NULL;
2497
2498 return ref;
2499 }
2500
2501 /* Return true if list contains an alias. */
2502
2503 inline bool
2504 symtab_node::has_aliases_p (void)
2505 {
2506 ipa_ref *ref = NULL;
2507
2508 return (iterate_direct_aliases (0, ref) != NULL);
2509 }
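/* Illustrative sketch (not part of this header): the iterate_* accessors above
   are meant to be used as loop conditions; they return NULL once the list is
   exhausted.  NODE below stands for any symtab_node.

     ipa_ref *ref = NULL;
     for (unsigned i = 0; node->iterate_reference (i, ref); i++)
       if (ref->use == IPA_REF_ALIAS)
         ...
*/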
2510
2511 /* Return true when RESOLUTION indicates that the linker will use
2512    the symbol from non-LTO object files. */
2513
2514 inline bool
2515 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2516 {
2517 return (resolution == LDPR_PREVAILING_DEF
2518 || resolution == LDPR_PREEMPTED_REG
2519 || resolution == LDPR_RESOLVED_EXEC
2520 || resolution == LDPR_RESOLVED_DYN);
2521 }
2522
2523 /* Return true when symtab_node is known to be used from other (non-LTO)
2524 object file. Known only when doing LTO via linker plugin. */
2525
2526 inline bool
2527 symtab_node::used_from_object_file_p (void)
2528 {
2529 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2530 return false;
2531 if (resolution_used_from_other_file_p (resolution))
2532 return true;
2533 return false;
2534 }
2535
2536 /* Return varpool node for given symbol and check it is a variable. */
2537
2538 inline varpool_node *
2539 varpool_node::get (const_tree decl)
2540 {
2541 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2542 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2543 }
2544
2545 /* Register a symbol NODE. */
2546
2547 inline void
2548 symbol_table::register_symbol (symtab_node *node)
2549 {
2550 node->next = nodes;
2551 node->previous = NULL;
2552
2553 if (nodes)
2554 nodes->previous = node;
2555 nodes = node;
2556
2557 node->order = order++;
2558 }
2559
2560 /* Register a top-level asm statement ASM_STR. */
2561
2562 asm_node *
2563 symbol_table::finalize_toplevel_asm (tree asm_str)
2564 {
2565 asm_node *node;
2566
2567 node = ggc_cleared_alloc<asm_node> ();
2568 node->asm_str = asm_str;
2569 node->order = order++;
2570 node->next = NULL;
2571
2572 if (asmnodes == NULL)
2573 asmnodes = node;
2574 else
2575 asm_last_node->next = node;
2576
2577 asm_last_node = node;
2578 return node;
2579 }
2580
2581 /* Unregister a symbol NODE. */
2582 inline void
2583 symbol_table::unregister (symtab_node *node)
2584 {
2585 if (node->previous)
2586 node->previous->next = node->next;
2587 else
2588 nodes = node->next;
2589
2590 if (node->next)
2591 node->next->previous = node->previous;
2592
2593 node->next = NULL;
2594 node->previous = NULL;
2595 }
2596
2597 /* Release a callgraph NODE with UID and put it into the list of free nodes. */
2598
2599 inline void
2600 symbol_table::release_symbol (cgraph_node *node, int uid)
2601 {
2602 cgraph_count--;
2603
2604 /* Clear out the node to NULL all pointers and add the node to the free
2605 list. */
2606 memset (node, 0, sizeof (*node));
2607 node->type = SYMTAB_FUNCTION;
2608 node->uid = uid;
2609 SET_NEXT_FREE_NODE (node, free_nodes);
2610 free_nodes = node;
2611 }
2612
2613 /* Allocate new callgraph node. */
2614
2615 inline cgraph_node *
2616 symbol_table::allocate_cgraph_symbol (void)
2617 {
2618 cgraph_node *node;
2619
2620 if (free_nodes)
2621 {
2622 node = free_nodes;
2623 free_nodes = NEXT_FREE_NODE (node);
2624 }
2625 else
2626 {
2627 node = ggc_cleared_alloc<cgraph_node> ();
2628 node->uid = cgraph_max_uid++;
2629 }
2630
2631 node->summary_uid = cgraph_max_summary_uid++;
2632 return node;
2633 }
2634
2635
2636 /* Return first symbol in the symbol table. */
2637 inline symtab_node *
2638 symbol_table::first_symbol (void)
2639 {
2640 return nodes;
2641 }
2642
2643 /* Walk all symbols. */
2644 #define FOR_EACH_SYMBOL(node) \
2645 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2646
2647 /* Return first static symbol with definition. */
2648 inline symtab_node *
2649 symbol_table::first_defined_symbol (void)
2650 {
2651 symtab_node *node;
2652
2653 for (node = nodes; node; node = node->next)
2654 if (node->definition)
2655 return node;
2656
2657 return NULL;
2658 }
2659
2660 /* Walk all symbols with definitions in current unit. */
2661 #define FOR_EACH_DEFINED_SYMBOL(node) \
2662 for ((node) = symtab->first_defined_symbol (); (node); \
2663 (node) = node->next_defined_symbol ())
2664
2665 /* Return first variable. */
2666 inline varpool_node *
2667 symbol_table::first_variable (void)
2668 {
2669 symtab_node *node;
2670 for (node = nodes; node; node = node->next)
2671 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2672 return vnode;
2673 return NULL;
2674 }
2675
2676 /* Return next variable after NODE. */
2677 inline varpool_node *
2678 symbol_table::next_variable (varpool_node *node)
2679 {
2680 symtab_node *node1 = node->next;
2681 for (; node1; node1 = node1->next)
2682 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2683 return vnode1;
2684 return NULL;
2685 }
2686 /* Walk all variables. */
2687 #define FOR_EACH_VARIABLE(node) \
2688 for ((node) = symtab->first_variable (); \
2689 (node); \
2690 (node) = symtab->next_variable ((node)))
2691
2692 /* Return first static variable with initializer. */
2693 inline varpool_node *
2694 symbol_table::first_static_initializer (void)
2695 {
2696 symtab_node *node;
2697 for (node = nodes; node; node = node->next)
2698 {
2699 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2700 if (vnode && DECL_INITIAL (node->decl))
2701 return vnode;
2702 }
2703 return NULL;
2704 }
2705
2706 /* Return next static variable with initializer after NODE. */
2707 inline varpool_node *
2708 symbol_table::next_static_initializer (varpool_node *node)
2709 {
2710 symtab_node *node1 = node->next;
2711 for (; node1; node1 = node1->next)
2712 {
2713 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2714 if (vnode1 && DECL_INITIAL (node1->decl))
2715 return vnode1;
2716 }
2717 return NULL;
2718 }
2719
2720 /* Walk all static variables with initializer set. */
2721 #define FOR_EACH_STATIC_INITIALIZER(node) \
2722 for ((node) = symtab->first_static_initializer (); (node); \
2723 (node) = symtab->next_static_initializer (node))
2724
2725 /* Return first static variable with definition. */
2726 inline varpool_node *
2727 symbol_table::first_defined_variable (void)
2728 {
2729 symtab_node *node;
2730 for (node = nodes; node; node = node->next)
2731 {
2732 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2733 if (vnode && vnode->definition)
2734 return vnode;
2735 }
2736 return NULL;
2737 }
2738
2739 /* Return next static variable with definition after NODE. */
2740 inline varpool_node *
2741 symbol_table::next_defined_variable (varpool_node *node)
2742 {
2743 symtab_node *node1 = node->next;
2744 for (; node1; node1 = node1->next)
2745 {
2746 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2747 if (vnode1 && vnode1->definition)
2748 return vnode1;
2749 }
2750 return NULL;
2751 }
2752 /* Walk all variables with definitions in current unit. */
2753 #define FOR_EACH_DEFINED_VARIABLE(node) \
2754 for ((node) = symtab->first_defined_variable (); (node); \
2755 (node) = symtab->next_defined_variable (node))
2756
2757 /* Return first function with body defined. */
2758 inline cgraph_node *
2759 symbol_table::first_defined_function (void)
2760 {
2761 symtab_node *node;
2762 for (node = nodes; node; node = node->next)
2763 {
2764 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2765 if (cn && cn->definition)
2766 return cn;
2767 }
2768 return NULL;
2769 }
2770
2771 /* Return next function with body defined after NODE. */
2772 inline cgraph_node *
2773 symbol_table::next_defined_function (cgraph_node *node)
2774 {
2775 symtab_node *node1 = node->next;
2776 for (; node1; node1 = node1->next)
2777 {
2778 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2779 if (cn1 && cn1->definition)
2780 return cn1;
2781 }
2782 return NULL;
2783 }
2784
2785 /* Walk all functions with body defined. */
2786 #define FOR_EACH_DEFINED_FUNCTION(node) \
2787 for ((node) = symtab->first_defined_function (); (node); \
2788 (node) = symtab->next_defined_function ((node)))
2789
2790 /* Return first function. */
2791 inline cgraph_node *
2792 symbol_table::first_function (void)
2793 {
2794 symtab_node *node;
2795 for (node = nodes; node; node = node->next)
2796 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2797 return cn;
2798 return NULL;
2799 }
2800
2801 /* Return next function. */
2802 inline cgraph_node *
2803 symbol_table::next_function (cgraph_node *node)
2804 {
2805 symtab_node *node1 = node->next;
2806 for (; node1; node1 = node1->next)
2807 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2808 return cn1;
2809 return NULL;
2810 }
2811
2812 /* Return first function with body defined. */
2813 inline cgraph_node *
2814 symbol_table::first_function_with_gimple_body (void)
2815 {
2816 symtab_node *node;
2817 for (node = nodes; node; node = node->next)
2818 {
2819 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2820 if (cn && cn->has_gimple_body_p ())
2821 return cn;
2822 }
2823 return NULL;
2824 }
2825
2826 /* Return next function with gimple body after NODE. */
2827 inline cgraph_node *
2828 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2829 {
2830 symtab_node *node1 = node->next;
2831 for (; node1; node1 = node1->next)
2832 {
2833 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2834 if (cn1 && cn1->has_gimple_body_p ())
2835 return cn1;
2836 }
2837 return NULL;
2838 }
2839
2840 /* Walk all functions. */
2841 #define FOR_EACH_FUNCTION(node) \
2842 for ((node) = symtab->first_function (); (node); \
2843 (node) = symtab->next_function ((node)))
2844
2845 /* Return true when callgraph node is a function with Gimple body defined
2846    in current unit. Functions can also be defined externally or they
2847 can be thunks with no Gimple representation.
2848
2849 Note that at WPA stage, the function body may not be present in memory. */
2850
2851 inline bool
2852 cgraph_node::has_gimple_body_p (void)
2853 {
2854 return definition && !thunk.thunk_p && !alias;
2855 }
2856
2857 /* Walk all functions with body defined. */
2858 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2859 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2860 (node) = symtab->next_function_with_gimple_body (node))
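/* Illustrative sketch (not part of this header): the FOR_EACH_* macros above
   combine with the node predicates, e.g. visiting every function that has a
   gimple body in the current unit:

     cgraph_node *node;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       if (node->optimize_for_size_p ())
         ...
*/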
2861
2862 /* Uniquize all constants that appear in memory.
2863 Each constant in memory thus far output is recorded
2864 in `const_desc_table'. */
2865
2866 struct GTY((for_user)) constant_descriptor_tree {
2867 /* A MEM for the constant. */
2868 rtx rtl;
2869
2870 /* The value of the constant. */
2871 tree value;
2872
2873 /* Hash of value. Computing the hash from value each time
2874 hashfn is called can't work properly, as that means recursive
2875 use of the hash table during hash table expansion. */
2876 hashval_t hash;
2877 };
2878
2879 /* Return true when function is only called directly or it has an alias;
2880    i.e. it is not externally visible, its address was not taken and
2881    it is not used in any other non-standard way. */
2882
2883 inline bool
2884 cgraph_node::only_called_directly_or_aliased_p (void)
2885 {
2886 gcc_assert (!global.inlined_to);
2887 return (!force_output && !address_taken
2888 && !used_from_other_partition
2889 && !DECL_VIRTUAL_P (decl)
2890 && !DECL_STATIC_CONSTRUCTOR (decl)
2891 && !DECL_STATIC_DESTRUCTOR (decl)
2892 && !used_from_object_file_p ()
2893 && !externally_visible);
2894 }
2895
2896 /* Return true when function can be removed from callgraph
2897 if all direct calls are eliminated. */
2898
2899 inline bool
2900 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2901 {
2902 gcc_checking_assert (!global.inlined_to);
2903 /* Instrumentation clones should not be removed before
2904 instrumentation happens. New callers may appear after
2905 instrumentation. */
2906 if (instrumentation_clone
2907 && !chkp_function_instrumented_p (decl))
2908 return false;
2909 /* Extern inlines can always go, we will use the external definition. */
2910 if (DECL_EXTERNAL (decl))
2911 return true;
2912 /* When function is needed, we can not remove it. */
2913 if (force_output || used_from_other_partition)
2914 return false;
2915 if (DECL_STATIC_CONSTRUCTOR (decl)
2916 || DECL_STATIC_DESTRUCTOR (decl))
2917 return false;
2918 /* Only COMDAT functions can be removed if externally visible. */
2919 if (externally_visible
2920 && (!DECL_COMDAT (decl)
2921 || forced_by_abi
2922 || used_from_object_file_p ()))
2923 return false;
2924 return true;
2925 }
2926
2927 /* Verify cgraph, if consistency checking is enabled. */
2928
2929 inline void
2930 cgraph_node::checking_verify_cgraph_nodes (void)
2931 {
2932 if (flag_checking)
2933 cgraph_node::verify_cgraph_nodes ();
2934 }
2935
2936 /* Return true when variable can be removed from variable pool
2937    if all direct references are removed. */
2938
2939 inline bool
2940 varpool_node::can_remove_if_no_refs_p (void)
2941 {
2942 if (DECL_EXTERNAL (decl))
2943 return true;
2944 return (!force_output && !used_from_other_partition
2945 && ((DECL_COMDAT (decl)
2946 && !forced_by_abi
2947 && !used_from_object_file_p ())
2948 || !externally_visible
2949 || DECL_HAS_VALUE_EXPR_P (decl)));
2950 }
2951
2952 /* Return true when all references to variable must be visible in ipa_ref_list.
2953 i.e. if the variable is not externally visible or not used in some magic
2954 way (asm statement or such).
2955 The magic uses are all summarized in force_output flag. */
2956
2957 inline bool
2958 varpool_node::all_refs_explicit_p ()
2959 {
2960 return (definition
2961 && !externally_visible
2962 && !used_from_other_partition
2963 && !force_output);
2964 }
2965
2966 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2967 {
2968 static hashval_t hash (constant_descriptor_tree *);
2969 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2970 };
2971
2972 /* Constant pool accessor function. */
2973 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2974
2975 /* Return node that alias is aliasing. */
2976
2977 inline cgraph_node *
2978 cgraph_node::get_alias_target (void)
2979 {
2980 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2981 }
2982
2983 /* Return node that alias is aliasing. */
2984
2985 inline varpool_node *
2986 varpool_node::get_alias_target (void)
2987 {
2988 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2989 }
2990
2991 /* Walk the alias chain to return the symbol NODE is an alias of.
2992 If NODE is not an alias, return NODE.
2993 When AVAILABILITY is non-NULL, get minimal availability in the chain.
2994 When REF is non-NULL, assume that reference happens in symbol REF
2995 when determining the availability. */
2996
2997 inline symtab_node *
2998 symtab_node::ultimate_alias_target (enum availability *availability,
2999 symtab_node *ref)
3000 {
3001 if (!alias)
3002 {
3003 if (availability)
3004 *availability = get_availability (ref);
3005 return this;
3006 }
3007
3008 return ultimate_alias_target_1 (availability, ref);
3009 }
3010
3011 /* Given function symbol, walk the alias chain to return the function it
3012    is an alias of. Do not walk through thunks.
3013 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3014 When REF is non-NULL, assume that reference happens in symbol REF
3015 when determining the availability. */
3016
3017 inline cgraph_node *
3018 cgraph_node::ultimate_alias_target (enum availability *availability,
3019 symtab_node *ref)
3020 {
3021 cgraph_node *n = dyn_cast <cgraph_node *>
3022 (symtab_node::ultimate_alias_target (availability, ref));
3023 if (!n && availability)
3024 *availability = AVAIL_NOT_AVAILABLE;
3025 return n;
3026 }
3027
3028 /* For given variable pool node, walk the alias chain to return the variable
3029    it is an alias of. Do not walk through thunks.
3030 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3031 When REF is non-NULL, assume that reference happens in symbol REF
3032 when determining the availability. */
3033
3034 inline varpool_node *
3035 varpool_node::ultimate_alias_target (availability *availability,
3036 symtab_node *ref)
3037 {
3038 varpool_node *n = dyn_cast <varpool_node *>
3039 (symtab_node::ultimate_alias_target (availability, ref));
3040
3041 if (!n && availability)
3042 *availability = AVAIL_NOT_AVAILABLE;
3043 return n;
3044 }
3045
3046 /* Set N as the callee of the call graph edge and add the edge to N's
3047    list of callers. */
3048
3049 inline void
3050 cgraph_edge::set_callee (cgraph_node *n)
3051 {
3052 prev_caller = NULL;
3053 if (n->callers)
3054 n->callers->prev_caller = this;
3055 next_caller = n->callers;
3056 n->callers = this;
3057 callee = n;
3058 }
3059
3060 /* Redirect the callee of the edge to N. The function does not update the
3061    underlying call expression. */
3062
3063 inline void
3064 cgraph_edge::redirect_callee (cgraph_node *n)
3065 {
3066 /* Remove from callers list of the current callee. */
3067 remove_callee ();
3068
3069 /* Insert to callers list of the new callee. */
3070 set_callee (n);
3071 }
3072
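/* A hedged usage sketch: a pass that has proven the real target of an
   edge (for instance after devirtualization) can repoint the edge.
   E and NEW_TARGET are assumed to be provided by the caller; remember
   that the gimple call statement itself still has to be updated
   separately.

     if (e->callee != new_target)
       e->redirect_callee (new_target);
*/
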
3073 /* Return true when the edge represents a direct recursion. */
3074
3075 inline bool
3076 cgraph_edge::recursive_p (void)
3077 {
3078 cgraph_node *c = callee->ultimate_alias_target ();
3079 if (caller->global.inlined_to)
3080 return caller->global.inlined_to->decl == c->decl;
3081 else
3082 return caller->decl == c->decl;
3083 }
3084
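/* A hedged usage sketch: the inliner and similar heuristics use the
   predicate above to avoid endlessly expanding self-calls.  E is an
   edge being considered in a hypothetical inlining decision:

     if (e->recursive_p ())
       return false;

   i.e. bail out when the call is a direct self-recursion.  */
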
3085 /* Remove the edge from the list of the callers of the callee. */
3086
3087 inline void
3088 cgraph_edge::remove_callee (void)
3089 {
3090 gcc_assert (!indirect_unknown_callee);
3091 if (prev_caller)
3092 prev_caller->next_caller = next_caller;
3093 if (next_caller)
3094 next_caller->prev_caller = prev_caller;
3095 if (!prev_caller)
3096 callee->callers = next_caller;
3097 }
3098
3099 /* Return true if the call must bind to the current definition. */
3100
3101 inline bool
3102 cgraph_edge::binds_to_current_def_p ()
3103 {
3104 if (callee)
3105 return callee->binds_to_current_def_p (caller);
3106 else
3107 return false;
3108 }
3109
3110 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
3111 static inline bool
3112 decl_is_tm_clone (const_tree fndecl)
3113 {
3114 cgraph_node *n = cgraph_node::get (fndecl);
3115 if (n)
3116 return n->tm_clone;
3117 return false;
3118 }
3119
3120 /* Indicate that the node is needed, i.e. reachable via some
3121    external means. */
3122
3123 inline void
3124 cgraph_node::mark_force_output (void)
3125 {
3126 force_output = 1;
3127 gcc_checking_assert (!global.inlined_to);
3128 }
3129
3130 /* Return true if function should be optimized for size. */
3131
3132 inline bool
3133 cgraph_node::optimize_for_size_p (void)
3134 {
3135 if (opt_for_fn (decl, optimize_size))
3136 return true;
3137 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
3138 return true;
3139 else
3140 return false;
3141 }
3142
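/* A hedged usage sketch: code-generation heuristics typically branch
   on this predicate when choosing between compact and fast sequences.
   "node" is the cgraph_node being transformed; the two limits are
   hypothetical tuning knobs.

     unsigned limit = node->optimize_for_size_p ()
                      ? size_limit : speed_limit;
*/
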
3143 /* Return symtab_node for NODE or create one if it is not present
3144 in symtab. */
3145
3146 inline symtab_node *
3147 symtab_node::get_create (tree node)
3148 {
3149 if (TREE_CODE (node) == VAR_DECL)
3150 return varpool_node::get_create (node);
3151 else
3152 return cgraph_node::get_create (node);
3153 }
3154
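/* A hedged usage sketch: obtain the symbol table node for a
   declaration, creating it when the front end has not registered one
   yet.  DECL is assumed to be a VAR_DECL or FUNCTION_DECL.

     symtab_node *snode = symtab_node::get_create (decl);
     if (cgraph_node *cnode = dyn_cast <cgraph_node *> (snode))
       cnode->mark_force_output ();

   The dyn_cast mirrors the dispatch done by get_create itself:
   variables yield a varpool_node, functions a cgraph_node.  */
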
3155 /* Return availability of NODE when referenced from REF. */
3156
3157 inline enum availability
3158 symtab_node::get_availability (symtab_node *ref)
3159 {
3160 if (is_a <cgraph_node *> (this))
3161 return dyn_cast <cgraph_node *> (this)->get_availability (ref);
3162 else
3163 return dyn_cast <varpool_node *> (this)->get_availability (ref);
3164 }
3165
3166 /* Call CALLBACK on this symtab node and the aliases associated with it.
3167    When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
3168
3169 inline bool
3170 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
3171 void *),
3172 void *data,
3173 bool include_overwritable)
3174 {
3175 if (include_overwritable
3176 || get_availability () > AVAIL_INTERPOSABLE)
3177 {
3178 if (callback (this, data))
3179 return true;
3180 }
3181 if (has_aliases_p ())
3182 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3183 return false;
3184 }
3185
3186 /* Call CALLBACK on the function and the aliases associated with it.
3187    When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3188    skipped. */
3189
3190 inline bool
3191 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
3192 void *),
3193 void *data,
3194 bool include_overwritable)
3195 {
3196 if (include_overwritable
3197 || get_availability () > AVAIL_INTERPOSABLE)
3198 {
3199 if (callback (this, data))
3200 return true;
3201 }
3202 if (has_aliases_p ())
3203 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3204 return false;
3205 }
3206
3207 /* Call CALLBACK on the varpool symbol and the aliases associated with it.
3208    When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3209    skipped. */
3210
3211 inline bool
3212 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3213 void *),
3214 void *data,
3215 bool include_overwritable)
3216 {
3217 if (include_overwritable
3218 || get_availability () > AVAIL_INTERPOSABLE)
3219 {
3220 if (callback (this, data))
3221 return true;
3222 }
3223 if (has_aliases_p ())
3224 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3225 return false;
3226 }
3227
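/* A hedged usage sketch for the three walkers above.  The callback
   receives each symbol (the node itself and then its aliases) plus
   the opaque DATA pointer, and returns true to stop the walk early.
   "count_reachable" is a hypothetical callback and FN stands for some
   cgraph_node obtained by the caller.

     static bool
     count_reachable (cgraph_node *node ATTRIBUTE_UNUSED, void *data)
     {
       ++*(int *) data;
       return false;
     }

     int n = 0;
     fn->call_for_symbol_and_aliases (count_reachable, &n, true);

   Returning false from the callback keeps the walk going; passing
   false as INCLUDE_OVERWRITABLE skips interposable symbols.  */
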
3228 /* Return true if the reference may be used in an address comparison. */
3229
3230 inline bool
3231 ipa_ref::address_matters_p ()
3232 {
3233 if (use != IPA_REF_ADDR)
3234 return false;
3235 /* Addresses taken from virtual tables are never compared. */
3236 if (is_a <varpool_node *> (referring)
3237 && DECL_VIRTUAL_P (referring->decl))
3238 return false;
3239 return referred->address_can_be_compared_p ();
3240 }
3241
3242 /* Build polymorphic call context for indirect call E. */
3243
3244 inline
3245 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3246 {
3247 gcc_checking_assert (e->indirect_info->polymorphic);
3248 *this = e->indirect_info->context;
3249 }
3250
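/* A hedged usage sketch: a pass examining a polymorphic indirect call
   can recover the context recorded on the edge.  "consider_targets"
   is a hypothetical helper standing in for pass-specific logic such
   as enumerating possible call targets.

     if (e->indirect_info && e->indirect_info->polymorphic)
       {
         ipa_polymorphic_call_context ctx (e);
         consider_targets (e, ctx);
       }
*/
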
3251 /* Build empty "I know nothing" context. */
3252
3253 inline
3254 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3255 {
3256 clear_speculation ();
3257 clear_outer_type ();
3258 invalid = false;
3259 }
3260
3261 /* Make context non-speculative. */
3262
3263 inline void
3264 ipa_polymorphic_call_context::clear_speculation ()
3265 {
3266 speculative_outer_type = NULL;
3267 speculative_offset = 0;
3268 speculative_maybe_derived_type = false;
3269 }
3270
3271 /* Produce a context specifying all derived types of OTR_TYPE. If OTR_TYPE
3272    is NULL, the context is set to the dummy "I know nothing" setting. */
3273
3274 inline void
3275 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3276 {
3277 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3278 offset = 0;
3279 maybe_derived_type = true;
3280 maybe_in_construction = true;
3281 dynamic = true;
3282 }
3283
3284 /* Adjust all offsets in the context by OFF bits. */
3285
3286 inline void
3287 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3288 {
3289 if (outer_type)
3290 offset += off;
3291 if (speculative_outer_type)
3292 speculative_offset += off;
3293 }
3294
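/* A hedged usage sketch: when analysis proves that the object the
   context describes actually sits some number of bits inside a larger
   object (say, a field of an outer class), both the known and the
   speculative offsets can be shifted in one call.  CTX is assumed to
   be an ipa_polymorphic_call_context and "field_offset" a
   hypothetical value in bits.

     ctx.offset_by (field_offset);
*/
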
3295 /* Return TRUE if context is fully useless. */
3296
3297 inline bool
3298 ipa_polymorphic_call_context::useless_p () const
3299 {
3300 return (!outer_type && !speculative_outer_type);
3301 }
3302
3303 /* Return true if NODE is local. Instrumentation clones are counted as local
3304    only when the original function is local. */
3305
3306 static inline bool
3307 cgraph_local_p (cgraph_node *node)
3308 {
3309 if (!node->instrumentation_clone || !node->instrumented_version)
3310 return node->local.local;
3311
3312 return node->local.local && node->instrumented_version->local.local;
3313 }
3314
3315 /* When using fprintf (or similar), problems can arise with
3316 transient generated strings. Many string-generation APIs
3317 only support one result being alive at once (e.g. by
3318 returning a pointer to a statically-allocated buffer).
3319
3320    If there is more than one generated string within one
3321    fprintf call, the first string gets evicted or overwritten
3322    by the second before fprintf is fully evaluated.
3323 See e.g. PR/53136.
3324
3325 This function provides a workaround for this, by providing
3326 a simple way to create copies of these transient strings,
3327 without the need to have explicit cleanup:
3328
3329 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3330 xstrdup_for_dump (EXPR_1),
3331 xstrdup_for_dump (EXPR_2));
3332
3333 This is actually a simple wrapper around ggc_strdup, but
3334 the name documents the intent. We require that no GC can occur
3335 within the fprintf call. */
3336
3337 static inline const char *
3338 xstrdup_for_dump (const char *transient_str)
3339 {
3340 return ggc_strdup (transient_str);
3341 }
3342
3343 #endif /* GCC_CGRAPH_H */