1 /* Callgraph handling code.
2 Copyright (C) 2003-2018 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "profile-count.h"
25 #include "ipa-ref.h"
26 #include "plugin-api.h"
27
28 class ipa_opt_pass_d;
29 typedef ipa_opt_pass_d *ipa_opt_pass;
30
31 /* Symbol table consists of functions and variables.
32 TODO: add labels and CONST_DECLs. */
33 enum symtab_type
34 {
35 SYMTAB_SYMBOL,
36 SYMTAB_FUNCTION,
37 SYMTAB_VARIABLE
38 };
39
40 /* Section names are stored as reference counted strings in GGC safe hashtable
41 (to make them survive through PCH). */
42
43 struct GTY((for_user)) section_hash_entry
44 {
45 int ref_count;
46 char *name; /* As long as this data structure stays in GGC, we cannot put
47 the string at the tail of the structure or GGC dies in a
48 horrible way. */
49 };
50
51 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
52 {
53 typedef const char *compare_type;
54
55 static hashval_t hash (section_hash_entry *);
56 static bool equal (section_hash_entry *, const char *);
57 };
58
59 enum availability
60 {
61 /* Not yet set by cgraph_function_body_availability. */
62 AVAIL_UNSET,
63 /* Function body/variable initializer is unknown. */
64 AVAIL_NOT_AVAILABLE,
65 /* Function body/variable initializer is known but might be replaced
66 by a different one from other compilation unit and thus needs to
67 be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
68 arbitrary side effects on escaping variables and functions, while
69 like AVAILABLE it might access static variables. */
70 AVAIL_INTERPOSABLE,
71 /* Function body/variable initializer is known and will be used in final
72 program. */
73 AVAIL_AVAILABLE,
74 /* Function body/variable initializer is known and all its uses are
75 explicitly visible within the current unit (i.e. its address is never taken
76 and it is not exported to other units). Currently used only for functions. */
77 AVAIL_LOCAL
78 };
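/* Illustrative sketch (not part of the interface): a typical IPA query first
   resolves aliases and then asks how trustworthy the resulting body is; only
   AVAIL_AVAILABLE and AVAIL_LOCAL bodies can be relied on without worrying
   about interposition.  The helper below is hypothetical and kept under
   #if 0 so it is never compiled.  */
#if 0
static bool
body_is_reliable_p (symtab_node *node)
{
  enum availability avail;
  symtab_node *target = node->ultimate_alias_target (&avail);
  /* AVAIL_INTERPOSABLE bodies may be replaced by another unit at link time,
     so optimizations must not depend on their contents.  */
  return target != NULL && avail >= AVAIL_AVAILABLE;
}
#endif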
79
80 /* Classification of symbols WRT partitioning. */
81 enum symbol_partitioning_class
82 {
83 /* External declarations are ignored by partitioning algorithms and they are
84 added into the boundary later via compute_ltrans_boundary. */
85 SYMBOL_EXTERNAL,
86 /* Partitioned symbols are put into one of the partitions. */
87 SYMBOL_PARTITION,
88 /* Duplicated symbols (such as comdat or constant pool references) are
89 copied into every node needing them via add_symbol_to_partition. */
90 SYMBOL_DUPLICATE
91 };
92
93 /* Base of all entries in the symbol table.
94 The symtab_node is inherited by cgraph and varpool nodes. */
95 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
96 chain_next ("%h.next"), chain_prev ("%h.previous")))
97 symtab_node
98 {
99 public:
100 /* Return name. */
101 const char *name () const;
102
103 /* Return dump name. */
104 const char *dump_name () const;
105
106 /* Return asm name. */
107 const char *asm_name () const;
108
109 /* Return dump name with assembler name. */
110 const char *dump_asm_name () const;
111
112 /* Add node into symbol table. This function is not used directly, but via
113 cgraph/varpool node creation routines. */
114 void register_symbol (void);
115
116 /* Remove symbol from symbol table. */
117 void remove (void);
118
119 /* Dump symtab node to F. */
120 void dump (FILE *f);
121
122 /* Dump symtab node to stderr. */
123 void DEBUG_FUNCTION debug (void);
124
125 /* Verify consistency of node. */
126 void DEBUG_FUNCTION verify (void);
127
128 /* Return ipa reference from this symtab_node to
129 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
130 of the use. */
131 ipa_ref *create_reference (symtab_node *referred_node,
132 enum ipa_ref_use use_type);
133
134 /* Return ipa reference from this symtab_node to
135 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
136 of the use and STMT the statement (if it exists). */
137 ipa_ref *create_reference (symtab_node *referred_node,
138 enum ipa_ref_use use_type, gimple *stmt);
139
140 /* If VAL is a reference to a function or a variable, add a reference from
141 this symtab_node to the corresponding symbol table node. Return the new
142 reference or NULL if none was created. */
143 ipa_ref *maybe_create_reference (tree val, gimple *stmt);
144
145 /* Clone all references from symtab NODE to this symtab_node. */
146 void clone_references (symtab_node *node);
147
148 /* Clone all referring from symtab NODE to this symtab_node; i.e. every
149 symbol referring to NODE gets a corresponding reference to this node. */
152 void clone_referring (symtab_node *node);
153
154 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
155 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
156
157 /* Find the structure describing a reference to REFERRED_NODE
158 and associated with statement STMT. */
159 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
160 unsigned int lto_stmt_uid);
161
162 /* Remove all references that are associated with statement STMT. */
163 void remove_stmt_references (gimple *stmt);
164
165 /* Remove all stmt references in non-speculative references.
166 Those are not maintained during inlining & cloning.
167 The exception is speculative references that are updated along
168 with the callgraph edges associated with them. */
169 void clear_stmts_in_references (void);
170
171 /* Remove all references in ref list. */
172 void remove_all_references (void);
173
174 /* Remove all referring items in ref list. */
175 void remove_all_referring (void);
176
177 /* Dump references in ref list to FILE. */
178 void dump_references (FILE *file);
179
180 /* Dump referring in list to FILE. */
181 void dump_referring (FILE *);
182
183 /* Get number of references for this node. */
184 inline unsigned num_references (void)
185 {
186 return ref_list.references ? ref_list.references->length () : 0;
187 }
188
189 /* Return the I-th reference in the list and also set REF to it. */
190 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
191
192 /* Return the I-th referring item in the list and also set REF to it. */
193 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
194
195 /* Return the I-th referring alias item in the list and also set REF to it. */
196 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
197
198 /* Return true if this symtab node and TARGET represent
199 semantically equivalent symbols. */
200 bool semantically_equivalent_p (symtab_node *target);
201
202 /* Classify this symtab node for partitioning. */
203 enum symbol_partitioning_class get_partitioning_class (void);
204
205 /* Return comdat group. */
206 tree get_comdat_group ()
207 {
208 return x_comdat_group;
209 }
210
211 /* Return comdat group as identifier_node. */
212 tree get_comdat_group_id ()
213 {
214 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
215 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
216 return x_comdat_group;
217 }
218
219 /* Set comdat group. */
220 void set_comdat_group (tree group)
221 {
222 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
223 || DECL_P (group));
224 x_comdat_group = group;
225 }
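/* Illustrative sketch (not part of the interface): the comdat group may be
   stored either as an IDENTIFIER_NODE or as a DECL; get_comdat_group_id
   lazily canonicalizes it to the assembler-name identifier, so two groups
   can be compared directly.  Hypothetical helper, kept under #if 0.  */
#if 0
static bool
same_comdat_group_name_p (symtab_node *a, symtab_node *b)
{
  /* Identifiers are interned, so pointer equality is enough; NULL means
     the symbol is not in any comdat group.  */
  return a->get_comdat_group_id () == b->get_comdat_group_id ();
}
#endif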
226
227 /* Return section as string. */
228 const char * get_section ()
229 {
230 if (!x_section)
231 return NULL;
232 return x_section->name;
233 }
234
235 /* Remove node from same comdat group. */
236 void remove_from_same_comdat_group (void);
237
238 /* Add this symtab_node to the same comdat group that OLD is in. */
239 void add_to_same_comdat_group (symtab_node *old_node);
240
241 /* Dissolve the same_comdat_group list in which NODE resides. */
242 void dissolve_same_comdat_group_list (void);
243
244 /* Return true when symtab_node is known to be used from other (non-LTO)
245 object file. Known only when doing LTO via linker plugin. */
246 bool used_from_object_file_p (void);
247
248 /* Walk the alias chain to return the symbol NODE is alias of.
249 If NODE is not an alias, return NODE.
250 When AVAILABILITY is non-NULL, get minimal availability in the chain.
251 When REF is non-NULL, assume that reference happens in symbol REF
252 when determining the availability. */
253 symtab_node *ultimate_alias_target (enum availability *avail = NULL,
254 struct symtab_node *ref = NULL);
255
256 /* Return next reachable static symbol with initializer after NODE. */
257 inline symtab_node *next_defined_symbol (void);
258
259 /* Add reference recording that symtab node is alias of TARGET.
260 If TRANSPARENT is true make the alias a transparent alias.
261 The function can fail in the case of aliasing cycles; in this case
262 it returns false. */
263 bool resolve_alias (symtab_node *target, bool transparent = false);
264
265 /* The C++ FE sometimes changes linkage flags after producing same
266 body aliases. */
267 void fixup_same_cpp_alias_visibility (symtab_node *target);
268
269 /* Call callback on symtab node and aliases associated with this node.
270 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
271 skipped. */
272 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
273 void *data,
274 bool include_overwrite);
275
276 /* If this node cannot be interposed by the static or dynamic linker to
277 point to a different definition, return this symbol. Otherwise look for
278 an alias with such a property and if none exists, introduce a new one. */
279 symtab_node *noninterposable_alias (void);
280
281 /* Return node that alias is aliasing. */
282 inline symtab_node *get_alias_target (void);
283
284 /* Set section for symbol and its aliases. */
285 void set_section (const char *section);
286
287 /* Set section, do not recurse into aliases.
288 When one wants to change section of symbol and its aliases,
289 use set_section. */
290 void set_section_for_node (const char *section);
291
292 /* Set initialization priority to PRIORITY. */
293 void set_init_priority (priority_type priority);
294
295 /* Return the initialization priority. */
296 priority_type get_init_priority ();
297
298 /* Return availability of NODE when referenced from REF. */
299 enum availability get_availability (symtab_node *ref = NULL);
300
301 /* Return true if NODE binds to current definition in final executable
302 when referenced from REF. If REF is NULL return conservative value
303 for any reference. */
304 bool binds_to_current_def_p (symtab_node *ref = NULL);
305
306 /* Make DECL local. */
307 void make_decl_local (void);
308
309 /* Copy visibility from N. */
310 void copy_visibility_from (symtab_node *n);
311
312 /* Return the desired alignment of the definition. This is NOT the alignment
313 useful to access THIS, because THIS may be interposable and DECL_ALIGN
314 should be used instead. It however must be guaranteed when outputting the
315 definition of THIS. */
316 unsigned int definition_alignment ();
317
318 /* Return true if alignment can be increased. */
319 bool can_increase_alignment_p ();
320
321 /* Increase alignment of symbol to ALIGN. */
322 void increase_alignment (unsigned int align);
323
324 /* Return true if list contains an alias. */
325 bool has_aliases_p (void);
326
327 /* Return true when the symbol is a real symbol, i.e. it is not an inline
328 clone or an abstract function kept for debug info purposes only. */
329 bool real_symbol_p (void);
330
331 /* Determine if the symbol declaration is needed. That is, it is visible to
332 something outside this translation unit or to something magic in the system
333 configury. This function is used just during symbol creation. */
334 bool needed_p (void);
335
336 /* Return true if this symbol is a function from the C frontend specified
337 directly in RTL form (with "__RTL"). */
338 bool native_rtl_p () const;
339
340 /* Return true when there are references to the node. */
341 bool referred_to_p (bool include_self = true);
342
343 /* Return true if the symbol can be discarded by the linker from the binary.
344 Assume that the symbol is used (so there is no need to take into account
345 garbage collecting linkers).
346
347 This can happen for comdats, commons and weaks when they are prevailed
348 by another definition at static linking time. */
349 inline bool
350 can_be_discarded_p (void)
351 {
352 return (DECL_EXTERNAL (decl)
353 || ((get_comdat_group ()
354 || DECL_COMMON (decl)
355 || (DECL_SECTION_NAME (decl) && DECL_WEAK (decl)))
356 && ((resolution != LDPR_PREVAILING_DEF
357 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP)
358 || flag_incremental_link)
359 && resolution != LDPR_PREVAILING_DEF_IRONLY));
360 }
361
362 /* Return true if NODE is local to a particular COMDAT group, and must not
363 be named from outside the COMDAT. This is used for C++ decloned
364 constructors. */
365 inline bool comdat_local_p (void)
366 {
367 return (same_comdat_group && !TREE_PUBLIC (decl));
368 }
369
370 /* Return true if ONE and TWO are part of the same COMDAT group. */
371 inline bool in_same_comdat_group_p (symtab_node *target);
372
373 /* Return true if symbol is known to be nonzero. */
374 bool nonzero_address ();
375
376 /* Return 0 if symbol is known to have different address than S2,
377 Return 1 if symbol is known to have same address as S2,
378 return 2 otherwise.
379
380 If MEMORY_ACCESSED is true, assume that both the memory pointed to by THIS
381 and by S2 is going to be accessed. This eliminates the situations when
382 either THIS or S2 is NULL and is useful for comparing bases when deciding
383 about memory aliasing. */
384 int equal_address_to (symtab_node *s2, bool memory_accessed = false);
385
386 /* Return true if symbol's address may possibly be compared to other
387 symbol's address. */
388 bool address_matters_p ();
389
390 /* Return true if NODE's address can be compared. This uses properties
391 of NODE only and does not check whether the address is actually taken in
392 an interesting way. For that use ADDRESS_MATTERS_P instead. */
393 bool address_can_be_compared_p (void);
394
395 /* Return symbol table node associated with DECL, if any,
396 and NULL otherwise. */
397 static inline symtab_node *get (const_tree decl)
398 {
399 /* Check that we are called for sane type of object - functions
400 and static or external variables. */
401 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
402 || (TREE_CODE (decl) == VAR_DECL
403 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
404 || in_lto_p)));
405 /* Check that the mapping is sane - perhaps this check can go away,
406 but at the moment frontends tend to corrupt the mapping by calling
407 memcpy/memset on the tree nodes. */
408 gcc_checking_assert (!decl->decl_with_vis.symtab_node
409 || decl->decl_with_vis.symtab_node->decl == decl);
410 return decl->decl_with_vis.symtab_node;
411 }
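/* Illustrative sketch (not part of the interface): mapping a function or
   variable DECL seen in gimple to its symbol-table entry.  Hypothetical
   helper, kept under #if 0.  */
#if 0
static bool
decl_is_defined_here_p (tree decl)
{
  /* DECL must be a FUNCTION_DECL or a static/external VAR_DECL; the entry
     may still be missing if the decl was never registered.  */
  symtab_node *node = symtab_node::get (decl);
  return node != NULL && node->definition;
}
#endif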
412
413 /* Try to find a symtab node for declaration DECL and if it does not
414 exist or if it corresponds to an inline clone, create a new one. */
415 static inline symtab_node * get_create (tree node);
416
417 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
418 Return NULL if there's no such node. */
419 static symtab_node *get_for_asmname (const_tree asmname);
420
421 /* Verify symbol table for internal consistency. */
422 static DEBUG_FUNCTION void verify_symtab_nodes (void);
423
424 /* Perform internal consistency checks, if they are enabled. */
425 static inline void checking_verify_symtab_nodes (void);
426
427 /* Type of the symbol. */
428 ENUM_BITFIELD (symtab_type) type : 8;
429
430 /* The symbols resolution. */
431 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
432
433 /*** Flags representing the symbol type. ***/
434
435 /* True when the symbol corresponds to a definition in the current unit.
436 Set via finalize_function or finalize_decl. */
437 unsigned definition : 1;
438 /* True when the symbol is an alias.
439 Set by assemble_alias. */
440 unsigned alias : 1;
441 /* When true the alias is translated into its target symbol either by GCC
442 or assembler (it also may just be a duplicate declaration of the same
443 linker name).
444
445 Currently transparent aliases come in three different flavors
446 - aliases having the same assembler name as their target (aka duplicated
447 declarations). In this case the assembler names compare via
448 assembler_names_equal_p and weakref is false
449 - aliases that are renamed at the time they are output to the final file
450 by varasm.c. For those DECL_ASSEMBLER_NAME has
451 IDENTIFIER_TRANSPARENT_ALIAS set and thus also their assembler
452 name must be unique.
453 Weakrefs belong to this category when we target an assembler without
454 the .weakref directive.
455 - weakrefs that are renamed by the assembler via the .weakref directive.
456 In this case the alias may or may not be a definition (depending on
457 whether the target declaration was seen by the compiler), weakref is set.
458 Unless we are before renaming statics, assembler names are different.
459
460 Given that we now support duplicate declarations, the second option is
461 redundant and will be removed. */
462 unsigned transparent_alias : 1;
463 /* True when alias is a weakref. */
464 unsigned weakref : 1;
465 /* The C++ frontend produces same body aliases and extra name aliases for
466 virtual functions and vtables that are obviously equivalent.
467 Those aliases are a bit special, especially because the C++ frontend
468 visibility code is so ugly it cannot get them right the first time
469 and their visibility needs to be copied from their "masters" at
470 the end of parsing. */
471 unsigned cpp_implicit_alias : 1;
472 /* Set once the definition was analyzed. The list of references and
473 other properties are built during analysis. */
474 unsigned analyzed : 1;
475 /* Set for write-only variables. */
476 unsigned writeonly : 1;
477 /* Visibility of symbol was used for further optimization; do not
478 permit further changes. */
479 unsigned refuse_visibility_changes : 1;
480
481 /*** Visibility and linkage flags. ***/
482
483 /* Set when function is visible by other units. */
484 unsigned externally_visible : 1;
485 /* Don't reorder to other symbols having this set. */
486 unsigned no_reorder : 1;
487 /* The symbol will be assumed to be used in an invisible way (like
488 by a toplevel asm statement). */
489 unsigned force_output : 1;
490 /* Like FORCE_OUTPUT, but used in the case where it is the ABI that requires
491 the symbol to be exported. Unlike FORCE_OUTPUT this flag gets cleared for
492 symbols promoted to static and it does not inhibit optimization. */
493 unsigned forced_by_abi : 1;
494 /* True when the name is known to be unique and thus it does not need mangling. */
495 unsigned unique_name : 1;
496 /* Specify whether the section was set by user or by
497 compiler via -ffunction-sections. */
498 unsigned implicit_section : 1;
499 /* True when body and other characteristics have been removed by
500 symtab_remove_unreachable_nodes. */
501 unsigned body_removed : 1;
502
503 /*** WHOPR Partitioning flags.
504 These flags are used at ltrans stage when only part of the callgraph is
505 available. ***/
506
507 /* Set when variable is used from other LTRANS partition. */
508 unsigned used_from_other_partition : 1;
509 /* Set when function is available in the other LTRANS partition.
510 During WPA output it is used to mark nodes that are present in
511 multiple partitions. */
512 unsigned in_other_partition : 1;
513
514
515
516 /*** other flags. ***/
517
518 /* Set when symbol has address taken. */
519 unsigned address_taken : 1;
520 /* Set when init priority is set. */
521 unsigned in_init_priority_hash : 1;
522
523 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
524 of offloading, for separate compilation for a different target. */
525 unsigned need_lto_streaming : 1;
526
527 /* Set when symbol can be streamed into bytecode for offloading. */
528 unsigned offloadable : 1;
529
530
531 /* Ordering of all symtab entries. */
532 int order;
533
534 /* Declaration representing the symbol. */
535 tree decl;
536
537 /* Linked list of symbol table entries starting with symtab_nodes. */
538 symtab_node *next;
539 symtab_node *previous;
540
541 /* Linked list of symbols with the same asm name. There may be multiple
542 entries for a single symbol name during LTO, because symbols are renamed
543 only after partitioning.
544
545 Because inline clones are kept in the assembler name hash, they also
546 produce duplicate entries.
547
548 There are also several long standing bugs where frontends and builtin
549 code produce duplicated decls. */
550 symtab_node *next_sharing_asm_name;
551 symtab_node *previous_sharing_asm_name;
552
553 /* Circular list of nodes in the same comdat group if non-NULL. */
554 symtab_node *same_comdat_group;
555
556 /* Vectors of referring and referenced entities. */
557 ipa_ref_list ref_list;
558
559 /* Alias target. May be either a DECL pointer or an ASSEMBLER_NAME pointer
560 depending on what was known to the frontend at creation time.
561 Once the alias is resolved, this pointer becomes NULL. */
562 tree alias_target;
563
564 /* File stream where this node is being written to. */
565 struct lto_file_decl_data * lto_file_data;
566
567 PTR GTY ((skip)) aux;
568
569 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
570 tree x_comdat_group;
571
572 /* Section name. Again can be private, if allowed. */
573 section_hash_entry *x_section;
574
575 protected:
576 /* Dump base fields of symtab nodes to F. Not to be used directly. */
577 void dump_base (FILE *);
578
579 /* Verify common part of symtab node. */
580 bool DEBUG_FUNCTION verify_base (void);
581
582 /* Remove node from symbol table. This function is not used directly, but via
583 cgraph/varpool node removal routines. */
584 void unregister (void);
585
586 /* Return the initialization and finalization priority information for
587 DECL. If there is no previous priority information, a freshly
588 allocated structure is returned. */
589 struct symbol_priority_map *priority_info (void);
590
591 /* Worker for call_for_symbol_and_aliases_1. */
592 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
593 void *data,
594 bool include_overwrite);
595 private:
596 /* Worker for set_section. */
597 static bool set_section (symtab_node *n, void *s);
598
599 /* Worker for symtab_resolve_alias. */
600 static bool set_implicit_section (symtab_node *n, void *);
601
602 /* Worker searching noninterposable alias. */
603 static bool noninterposable_alias (symtab_node *node, void *data);
604
605 /* Worker for ultimate_alias_target. */
606 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
607 symtab_node *ref = NULL);
608
609 /* Get dump name with normal or assembly name. */
610 const char *get_dump_name (bool asm_name_p) const;
611 };
612
613 inline void
614 symtab_node::checking_verify_symtab_nodes (void)
615 {
616 if (flag_checking)
617 symtab_node::verify_symtab_nodes ();
618 }
619
620 /* Walk all aliases for NODE. */
621 #define FOR_EACH_ALIAS(node, alias) \
622 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
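/* Illustrative usage sketch (not part of the interface): ALIAS walks the
   ipa_ref entries describing the node's direct aliases; the alias symbol
   itself is the referring node of each entry.  Hypothetical helper, kept
   under #if 0.  */
#if 0
static unsigned
count_direct_aliases (symtab_node *node)
{
  unsigned n = 0;
  ipa_ref *alias;
  FOR_EACH_ALIAS (node, alias)
    n++;
  return n;
}
#endif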
623
624 /* This is the information that is put into the cgraph local structure
625 to recover a function. */
626 struct lto_file_decl_data;
627
628 extern const char * const cgraph_availability_names[];
629 extern const char * const ld_plugin_symbol_resolution_names[];
630 extern const char * const tls_model_names[];
631
632 /* Sub-structure of cgraph_node. Holds information about thunk, used only for
633 same body aliases.
634
635 Thunks are basically wrappers around methods which are introduced in case
636 of multiple inheritance in order to adjust the value of the "this" pointer
637 or of the returned value.
638
639 In the case of this-adjusting thunks, each back-end can override the
640 can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
641 (with a tail call for instance) directly as assembly. For the default hook
642 or for the case where the can_output_mi_thunk hooks return false, the thunk
643 is gimplified and lowered using the regular machinery. */
644
645 struct GTY(()) cgraph_thunk_info {
646 /* Offset used to adjust "this". */
647 HOST_WIDE_INT fixed_offset;
648
649 /* Offset in the virtual table to get the offset to adjust "this". Valid iff
650 VIRTUAL_OFFSET_P is true. */
651 HOST_WIDE_INT virtual_value;
652
653 /* Thunk target, i.e. the method that this thunk wraps. Depending on the
654 TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
655 tree alias;
656
657 /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
658 thunk. */
659 bool this_adjusting;
660
661 /* If true, this thunk is what we call a virtual thunk. In this case:
662 * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
663 done, add to the result the offset found in the vtable at:
664 vptr + VIRTUAL_VALUE
665 * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
666 the virtual one. */
667 bool virtual_offset_p;
668
669 /* ??? True for special kind of thunks, seems related to instrumentation. */
670 bool add_pointer_bounds_args;
671
672 /* Set to true when the alias node (the cgraph_node to which this struct
673 belongs) is a thunk. Access to any other field is invalid if this is
674 false. */
675 };
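/* Illustrative sketch of the adjustment described above (not part of the
   interface): a this-adjusting thunk shifts the incoming pointer by
   FIXED_OFFSET and, for virtual thunks, additionally by an offset loaded
   from the vtable at byte offset VIRTUAL_VALUE.  Hypothetical
   pseudo-implementation, kept under #if 0.  */
#if 0
static void *
apply_this_adjustment (void *this_ptr, const cgraph_thunk_info &info)
{
  char *p = (char *) this_ptr + info.fixed_offset;
  if (info.virtual_offset_p)
    {
      /* The adjusted object's vptr points at its vtable; the extra
	 adjustment is stored there at VIRTUAL_VALUE.  */
      char *vtable = *(char **) p;
      p += *(long *) (vtable + info.virtual_value);
    }
  return p;
}
#endif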
676
677 /* Information about the function collected locally.
678 Available after function is analyzed. */
679
680 struct GTY(()) cgraph_local_info {
681 /* Set when function is visible in current compilation unit only and
682 its address is never taken. */
683 unsigned local : 1;
684
685 /* False when there is something that makes versioning impossible. */
686 unsigned versionable : 1;
687
688 /* False when the function calling convention and signature cannot be changed.
689 This is the case when __builtin_apply_args is used. */
690 unsigned can_change_signature : 1;
691
692 /* True when the function has been originally extern inline, but it is
693 redefined now. */
694 unsigned redefined_extern_inline : 1;
695
696 /* True if the function may enter serial irrevocable mode. */
697 unsigned tm_may_enter_irr : 1;
698 };
699
700 /* Information about the function that needs to be computed globally
701 once compilation is finished. Available only with -funit-at-a-time. */
702
703 struct GTY(()) cgraph_global_info {
704 /* For inline clones this points to the function they will be
705 inlined into. */
706 cgraph_node *inlined_to;
707 };
708
709 /* Represent which DECL tree (or reference to such tree)
710 will be replaced by another tree while versioning. */
711 struct GTY(()) ipa_replace_map
712 {
713 /* The tree that will be replaced. */
714 tree old_tree;
715 /* The new (replacing) tree. */
716 tree new_tree;
717 /* Parameter number to replace, when old_tree is NULL. */
718 int parm_num;
719 /* True when a substitution should be done, false otherwise. */
720 bool replace_p;
721 /* True when we replace a reference to old_tree. */
722 bool ref_p;
723 };
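/* Illustrative sketch (not part of the interface): filling in an
   ipa_replace_map that substitutes a known constant for a parameter, in the
   form the cloning machinery consumes.  Hypothetical helper, kept under
   #if 0.  */
#if 0
static ipa_replace_map *
make_constant_replacement (tree old_parm, tree constant)
{
  ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
  map->old_tree = old_parm;	/* The tree being replaced.  */
  map->new_tree = constant;	/* The value substituted in the clone.  */
  map->parm_num = -1;		/* Only meaningful when old_tree is NULL.  */
  map->replace_p = true;	/* Actually perform the substitution.  */
  map->ref_p = false;		/* Replace the value, not a reference.  */
  return map;
}
#endif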
724
725 struct GTY(()) cgraph_clone_info
726 {
727 vec<ipa_replace_map *, va_gc> *tree_map;
728 bitmap args_to_skip;
729 bitmap combined_args_to_skip;
730 };
731
732 enum cgraph_simd_clone_arg_type
733 {
734 SIMD_CLONE_ARG_TYPE_VECTOR,
735 SIMD_CLONE_ARG_TYPE_UNIFORM,
736 /* These are only for integer/pointer arguments passed by value. */
737 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
738 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
739 /* These 6 are only for reference type arguments or arguments passed
740 by reference. */
741 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
742 SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP,
743 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
744 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP,
745 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
746 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP,
747 SIMD_CLONE_ARG_TYPE_MASK
748 };
749
750 /* Function arguments in the original function of a SIMD clone.
751 Supplementary data for `struct simd_clone'. */
752
753 struct GTY(()) cgraph_simd_clone_arg {
754 /* Original function argument as it originally existed in
755 DECL_ARGUMENTS. */
756 tree orig_arg;
757
758 /* orig_arg's function (or for extern functions type from
759 TYPE_ARG_TYPES). */
760 tree orig_type;
761
762 /* If argument is a vector, this holds the vector version of
763 orig_arg that after adjusting the argument types will live in
764 DECL_ARGUMENTS. Otherwise, this is NULL.
765
766 This basically holds:
767 vector(simdlen) __typeof__(orig_arg) new_arg. */
768 tree vector_arg;
769
770 /* vector_arg's type (or for extern functions the new vector type). */
771 tree vector_type;
772
773 /* If argument is a vector, this holds the array where the simd
774 argument is held while executing the simd clone function. This
775 is a local variable in the cloned function. Its content is
776 copied from vector_arg upon entry to the clone.
777
778 This basically holds:
779 __typeof__(orig_arg) simd_array[simdlen]. */
780 tree simd_array;
781
782 /* A SIMD clone's argument can be either linear (constant or
783 variable), uniform, or vector. */
784 enum cgraph_simd_clone_arg_type arg_type;
785
786 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
787 the constant linear step, if arg_type is
788 SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of
789 the uniform argument holding the step, otherwise 0. */
790 HOST_WIDE_INT linear_step;
791
792 /* Variable alignment if available, otherwise 0. */
793 unsigned int alignment;
794 };
795
796 /* Specific data for a SIMD function clone. */
797
798 struct GTY(()) cgraph_simd_clone {
799 /* Number of words in the SIMD lane associated with this clone. */
800 unsigned int simdlen;
801
802 /* Number of annotated function arguments in `args'. This is
803 usually the number of named arguments in FNDECL. */
804 unsigned int nargs;
805
806 /* Max hardware vector size in bits for integral vectors. */
807 unsigned int vecsize_int;
808
809 /* Max hardware vector size in bits for floating point vectors. */
810 unsigned int vecsize_float;
811
812 /* Machine mode of the mask argument(s), if they are to be passed
813 as bitmasks in integer argument(s). VOIDmode if masks are passed
814 as vectors of characteristic type. */
815 machine_mode mask_mode;
816
817 /* The mangling character for a given vector size. This is used
818 to determine the ISA mangling bit as specified in the Intel
819 Vector ABI. */
820 unsigned char vecsize_mangle;
821
822 /* True if this is the masked, in-branch version of the clone,
823 otherwise false. */
824 unsigned int inbranch : 1;
825
826 /* Doubly linked list of SIMD clones. */
827 cgraph_node *prev_clone, *next_clone;
828
829 /* Original cgraph node the SIMD clones were created for. */
830 cgraph_node *origin;
831
832 /* Annotated function arguments for the original function. */
833 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
834 };
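/* Illustrative example (not part of the interface): the hypothetical
   declaration below, kept under #if 0, would give rise to a SIMD clone whose
   args[] describe BASE as SIMD_CLONE_ARG_TYPE_VECTOR, I as
   SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP with linear_step 1 and SCALE as
   SIMD_CLONE_ARG_TYPE_UNIFORM; simdlen is derived from the target's
   vecsize_int/vecsize_float and the characteristic type.  */
#if 0
#pragma omp declare simd uniform(scale) linear(i:1)
float scale_elem (float *base, int i, float scale);
#endif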
835
836 /* Function Multiversioning info. */
837 struct GTY((for_user)) cgraph_function_version_info {
838 /* The cgraph_node for which the function version info is stored. */
839 cgraph_node *this_node;
840 /* Chains all the semantically identical function versions. The
841 first function in this chain is the version_info node of the
842 default function. */
843 cgraph_function_version_info *prev;
844 /* If this version node corresponds to a dispatcher for function
845 versions, this points to the version info node of the default
846 function, the first node in the chain. */
847 cgraph_function_version_info *next;
848 /* If this node corresponds to a function version, this points
849 to the dispatcher function decl, which is the function that must
850 be called to execute the right function version at run-time.
851
852 If this cgraph node is a dispatcher (if dispatcher_function is
853 true, in the cgraph_node struct) for function versions, this
854 points to resolver function, which holds the function body of the
855 dispatcher. The dispatcher decl is an alias to the resolver
856 function decl. */
857 tree dispatcher_resolver;
858 };
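/* Illustrative example (not part of the interface): with function
   multiversioning, e.g. the hypothetical declarations under #if 0 below,
   each version gets a cgraph_function_version_info chained through
   prev/next starting at the default version, and dispatcher_resolver links
   the versions to the dispatcher/resolver that picks one of them at run
   time.  */
#if 0
__attribute__ ((target ("default"))) int fma_kernel (void);
__attribute__ ((target ("avx2"))) int fma_kernel (void);
__attribute__ ((target ("sse4.2"))) int fma_kernel (void);
#endif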
859
860 #define DEFCIFCODE(code, type, string) CIF_ ## code,
861 /* Reasons for inlining failures. */
862
863 enum cgraph_inline_failed_t {
864 #include "cif-code.def"
865 CIF_N_REASONS
866 };
867
868 enum cgraph_inline_failed_type_t
869 {
870 CIF_FINAL_NORMAL = 0,
871 CIF_FINAL_ERROR
872 };
873
874 struct cgraph_edge;
875
876 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
877 {
878 typedef gimple *compare_type;
879
880 static hashval_t hash (cgraph_edge *);
881 static hashval_t hash (gimple *);
882 static bool equal (cgraph_edge *, gimple *);
883 };
884
885 /* The cgraph data structure.
886 Each function decl has an assigned cgraph_node listing its callees and callers. */
887
888 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
889 public:
890 /* Remove the node from the cgraph and all inline clones inlined into it.
891 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
892 removed. This allows the function to be called from an outer loop walking
893 the clone tree. */
894 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
895
896 /* Record all references from cgraph_node that are taken
897 in statement STMT. */
898 void record_stmt_references (gimple *stmt);
899
900 /* Like cgraph_set_call_stmt but walk the clone tree and update all
901 clones sharing the same function body.
902 When UPDATE_SPECULATIVE is true, all three components of a
903 speculative edge get updated. Otherwise we update only the direct
904 call. */
905 void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
906 bool update_speculative = true);
907
908 /* Walk the alias chain to return the function cgraph_node is alias of.
909 Walk through thunk, too.
910 When AVAILABILITY is non-NULL, get minimal availability in the chain.
911 When REF is non-NULL, assume that reference happens in symbol REF
912 when determining the availability. */
913 cgraph_node *function_symbol (enum availability *avail = NULL,
914 struct symtab_node *ref = NULL);
915
916 /* Walk the alias chain to return the function cgraph_node is alias of.
917 Walk through non virtual thunks, too. Thus we return either a function
918 or a virtual thunk node.
919 When AVAILABILITY is non-NULL, get minimal availability in the chain.
920 When REF is non-NULL, assume that reference happens in symbol REF
921 when determining the availability. */
922 cgraph_node *function_or_virtual_thunk_symbol
923 (enum availability *avail = NULL,
924 struct symtab_node *ref = NULL);
925
926 /* Create node representing clone of N executed COUNT times. Decrease
927 the execution counts from original node too.
928 The new clone will have decl set to DECL that may or may not be the same
929 as decl of N.
930
931 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
932 function's profile to reflect the fact that part of execution is handled
933 by node.
934 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
935 the new clone. Otherwise the caller is responsible for doing so later.
936
937 If the new node is being inlined into another one, NEW_INLINED_TO should be
938 the outline function the new one is (even indirectly) inlined to.
939 All hooks will see this in node's global.inlined_to, when invoked.
940 Can be NULL if the node is not inlined. SUFFIX is a string that is
941 appended to the original name. */
942 cgraph_node *create_clone (tree decl, profile_count count,
943 bool update_original,
944 vec<cgraph_edge *> redirect_callers,
945 bool call_duplication_hook,
946 cgraph_node *new_inlined_to,
947 bitmap args_to_skip, const char *suffix = NULL);
948
949 /* Create callgraph node clone with new declaration. The actual body will
950 be copied later at compilation stage. */
951 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
952 vec<ipa_replace_map *, va_gc> *tree_map,
953 bitmap args_to_skip, const char * suffix);
954
955 /* The cgraph node is being removed from the symbol table; see if its entry
956 can be replaced by another inline clone. */
957 cgraph_node *find_replacement (void);
958
959 /* Create a new cgraph node which is the new version of
960 callgraph node. REDIRECT_CALLERS holds the callers
961 edges which should be redirected to point to
962 NEW_VERSION. ALL the callees edges of the node
963 are cloned to the new version node. Return the new
964 version node.
965
966 If non-NULL, BBS_TO_COPY determines which basic blocks
967 are copied, to prevent duplication of calls that are dead
968 in the clone.
969
970 SUFFIX is a string that is appended to the original name. */
971
972 cgraph_node *create_version_clone (tree new_decl,
973 vec<cgraph_edge *> redirect_callers,
974 bitmap bbs_to_copy,
975 const char *suffix = NULL);
976
977 /* Perform function versioning.
978 Function versioning includes copying of the tree and
979 a callgraph update (creating a new cgraph node and updating
980 its callees and callers).
981
982 REDIRECT_CALLERS varray includes the edges to be redirected
983 to the new version.
984
985 TREE_MAP is a mapping of tree nodes we want to replace with
986 new ones (according to results of prior analysis).
987
988 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
989 from the new version.
990 If SKIP_RETURN is true, the new version will return void.
991 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
992 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
993
994 Return the new version's cgraph node. */
995 cgraph_node *create_version_clone_with_body
996 (vec<cgraph_edge *> redirect_callers,
997 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
998 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
999 const char *clone_name);
1000
1001 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
1002 corresponding to cgraph_node. */
1003 cgraph_function_version_info *insert_new_function_version (void);
1004
1005 /* Get the cgraph_function_version_info node corresponding to node. */
1006 cgraph_function_version_info *function_version (void);
1007
1008 /* Discover all functions and variables that are trivially needed, and
1009 analyze them as well as all functions and variables referred to by them. */
1010 void analyze (void);
1011
1012 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and
1013 it aliases DECL with adjustments made to the first parameter.
1014 See comments in struct cgraph_thunk_info for details on the parameters. */
1015 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
1016 HOST_WIDE_INT fixed_offset,
1017 HOST_WIDE_INT virtual_value,
1018 tree virtual_offset,
1019 tree real_alias);
1020
1021
1022 /* Return node that alias is aliasing. */
1023 inline cgraph_node *get_alias_target (void);
1024
1025 /* Given a function symbol, walk the alias chain to return the function the
1026 node is an alias of. Do not walk through thunks.
1027 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1028 When REF is non-NULL, assume that reference happens in symbol REF
1029 when determining the availability. */
1030
1031 cgraph_node *ultimate_alias_target (availability *availability = NULL,
1032 symtab_node *ref = NULL);
1033
1034 /* Expand the thunk to gimple if possible.
1035 When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
1036 no assembler is produced.
1037 When OUTPUT_ASM_THUNKS is true, also produce assembler for
1038 thunks that are not lowered. */
1039 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
1040
1041 /* Call expand_thunk on all callers that are thunks and analyze those
1042 nodes that were expanded. */
1043 void expand_all_artificial_thunks ();
1044
1045 /* Assemble thunks and aliases associated with the node. */
1046 void assemble_thunks_and_aliases (void);
1047
1048 /* Expand function specified by node. */
1049 void expand (void);
1050
1051 /* As a GCC extension we allow redefinition of the function. The
1052 semantics when both copies of bodies differ is not well defined.
1053 We replace the old body with the new body so in unit-at-a-time mode
1054 we always use the new body, while in normal mode we may end up with
1055 the old body inlined into some functions and the new body expanded and
1056 inlined in others. */
1057 void reset (void);
1058
1059 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
1060 kind of wrapper method. */
1061 void create_wrapper (cgraph_node *target);
1062
1063 /* Verify cgraph nodes of the cgraph node. */
1064 void DEBUG_FUNCTION verify_node (void);
1065
1066 /* Remove function from symbol table. */
1067 void remove (void);
1068
1069 /* Dump call graph node to file F. */
1070 void dump (FILE *f);
1071
1072 /* Dump call graph node to stderr. */
1073 void DEBUG_FUNCTION debug (void);
1074
1075 /* When doing LTO, read cgraph_node's body from disk if it is not already
1076 present. */
1077 bool get_untransformed_body (void);
1078
1079 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
1080 if it is not already present. When some IPA transformations are scheduled,
1081 apply them. */
1082 bool get_body (void);
1083
1084 /* Release memory used to represent body of function.
1085 Use this only for functions that are released before being translated to
1086 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1087 are free'd in final.c via free_after_compilation(). */
1088 void release_body (bool keep_arguments = false);
1089
1090 /* Return the DECL_STRUCT_FUNCTION of the function. */
1091 struct function *get_fun (void);
1092
1093 /* The cgraph_node is no longer a nested function; update cgraph accordingly. */
1094 void unnest (void);
1095
1096 /* Bring cgraph node local. */
1097 void make_local (void);
1098
1099 /* Likewise indicate that a node has its address taken. */
1100 void mark_address_taken (void);
1101
1102 /* Set finalization priority to PRIORITY. */
1103 void set_fini_priority (priority_type priority);
1104
1105 /* Return the finalization priority. */
1106 priority_type get_fini_priority (void);
1107
1108 /* Create edge from a given function to CALLEE in the cgraph. */
1109 cgraph_edge *create_edge (cgraph_node *callee,
1110 gcall *call_stmt, profile_count count);
1111
1112 /* Create an indirect edge with a yet-undetermined callee where the call
1113 statement destination is a formal parameter of the caller with index
1114 PARAM_INDEX. */
1115 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1116 profile_count count,
1117 bool compute_indirect_info = true);
1118
1119 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
1120 the same function body. If clones already have an edge for OLD_STMT, only
1121 update the edge the same way cgraph_set_call_stmt_including_clones does. */
1122 void create_edge_including_clones (cgraph_node *callee,
1123 gimple *old_stmt, gcall *stmt,
1124 profile_count count,
1125 cgraph_inline_failed_t reason);
1126
1127 /* Return the callgraph edge representing the GIMPLE_CALL statement
1128 CALL_STMT. */
1129 cgraph_edge *get_edge (gimple *call_stmt);
1130
1131 /* Collect all callers of cgraph_node and its aliases that are known to lead
1132 to NODE (i.e. are not overwritable) and that are not thunks. */
1133 vec<cgraph_edge *> collect_callers (void);
1134
1135 /* Remove all callers from the node. */
1136 void remove_callers (void);
1137
1138 /* Remove all callees from the node. */
1139 void remove_callees (void);
1140
1141 /* Return function availability. See cgraph.h for description of individual
1142 return values. */
1143 enum availability get_availability (symtab_node *ref = NULL);
1144
1145 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1146 if any to NOTHROW. */
1147 bool set_nothrow_flag (bool nothrow);
1148
1149 /* Set DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
1150 if any. */
1151 bool set_malloc_flag (bool malloc_p);
1152
1153 /* If SET_CONST is true, mark the function, aliases and thunks as ECF_CONST.
1154 If SET_CONST is false, clear the flag.
1155
1156 When setting the flag be careful about possible interposition:
1157 do not set the flag for functions that can be interposed, and only set the
1158 pure flag for functions that can bind to another definition.
1159
1160 Return true if any change was done. */
1161
1162 bool set_const_flag (bool set_const, bool looping);
1163
1164 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1165 if any to PURE.
1166
1167 When setting the flag, be careful about possible interposition.
1168 Return true if any change was done. */
1169
1170 bool set_pure_flag (bool pure, bool looping);
1171
1172 /* Call callback on function and aliases associated to the function.
1173 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1174 skipped. */
1175
1176 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1177 void *),
1178 void *data, bool include_overwritable);
1179
1180 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1181 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1182 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1183 skipped. */
1184 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1185 void *data),
1186 void *data,
1187 bool include_overwritable,
1188 bool exclude_virtual_thunks = false);
1189
1190 /* Likewise indicate that a node is needed, i.e. reachable via some
1191 external means. */
1192 inline void mark_force_output (void);
1193
1194 /* Return true when function can be marked local. */
1195 bool local_p (void);
1196
1197 /* Return true if cgraph_node can be made local for API change.
1198 Extern inline functions and C++ COMDAT functions can be made local
1199 at the expense of possible code size growth if function is used in multiple
1200 compilation units. */
1201 bool can_be_local_p (void);
1202
1203 /* Return true when the cgraph_node cannot return or throw and thus
1204 it is safe to ignore its side effects for IPA analysis. */
1205 bool cannot_return_p (void);
1206
1207 /* Return true when function cgraph_node and all its aliases are only called
1208 directly.
1209 i.e. it is not externally visible, address was not taken and
1210 it is not used in any other non-standard way. */
1211 bool only_called_directly_p (void);
1212
1213 /* Return true when the function is only called directly or it has an alias.
1214 i.e. it is not externally visible, address was not taken and
1215 it is not used in any other non-standard way. */
1216 inline bool only_called_directly_or_aliased_p (void);
1217
1218 /* Return true when function cgraph_node can be expected to be removed
1219 from program when direct calls in this compilation unit are removed.
1220
1221 As a special case COMDAT functions are
1222 cgraph_can_remove_if_no_direct_calls_p while they are not
1223 cgraph_only_called_directly_p (it is possible they are called from another
1224 unit).
1225
1226 This function behaves as cgraph_only_called_directly_p because eliminating
1227 all uses of a COMDAT function does not necessarily make it disappear from
1228 the program unless we are compiling the whole program or we do LTO. In that
1229 case we know we win since dynamic linking will not really discard the
1230 linkonce section.
1231
1232 If WILL_INLINE is true, assume that function will be inlined into all the
1233 direct calls. */
1234 bool will_be_removed_from_program_if_no_direct_calls_p
1235 (bool will_inline = false);
1236
1237 /* Return true when function can be removed from callgraph
1238 if all direct calls and references are eliminated. The function does
1239 not take into account comdat groups. */
1240 bool can_remove_if_no_direct_calls_and_refs_p (void);
1241
1242 /* Return true when function cgraph_node and its aliases can be removed from
1243 callgraph if all direct calls are eliminated.
1244 If WILL_INLINE is true, assume that function will be inlined into all the
1245 direct calls. */
1246 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1247
1248 /* Return true when the callgraph node is a function with a Gimple body
1249 defined in the current unit. Functions can also be defined externally or
1250 they can be thunks with no Gimple representation.
1251
1252 Note that at WPA stage, the function body may not be present in memory. */
1253 inline bool has_gimple_body_p (void);
1254
1255 /* Return true if function should be optimized for size. */
1256 bool optimize_for_size_p (void);
1257
1258 /* Dump the callgraph to file F. */
1259 static void dump_cgraph (FILE *f);
1260
1261 /* Dump the call graph to stderr. */
1262 static inline
1263 void debug_cgraph (void)
1264 {
1265 dump_cgraph (stderr);
1266 }
1267
1268 /* Record that DECL1 and DECL2 are semantically identical function
1269 versions. */
1270 static void record_function_versions (tree decl1, tree decl2);
1271
1272 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1273 DECL is a duplicate declaration. */
1274 static void delete_function_version_by_decl (tree decl);
1275
1276 /* Add the function FNDECL to the call graph.
1277 Unlike finalize_function, this function is intended to be used
1278 by the middle end and allows insertion of a new function at an arbitrary
1279 point of compilation. The function can be either in high, low or SSA form
1280 GIMPLE.
1281
1282 The function is assumed to be reachable and have its address taken (so no
1283 API breaking optimizations are performed on it).
1284
1285 The main work done by this function is to enqueue the function for later
1286 processing to avoid the need for passes to be re-entrant. */
1287 static void add_new_function (tree fndecl, bool lowered);
1288
1289 /* Return callgraph node for given symbol and check it is a function. */
1290 static inline cgraph_node *get (const_tree decl)
1291 {
1292 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1293 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1294 }
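/* Illustrative sketch (not part of the interface): obtaining the call-graph
   node for a FUNCTION_DECL, creating it on demand.  Hypothetical helper,
   kept under #if 0.  */
#if 0
static cgraph_node *
node_for_fndecl (tree fndecl)
{
  /* get () returns NULL when no node has been registered yet;
     get_create () makes one in that case.  */
  cgraph_node *node = cgraph_node::get (fndecl);
  return node ? node : cgraph_node::get_create (fndecl);
}
#endif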
1295
1296 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1297 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1298 have the garbage collector run at the moment. We would need to either
1299 create a new GC context, or just not compile right now. */
1300 static void finalize_function (tree, bool);
1301
1302 /* Return cgraph node assigned to DECL. Create new one when needed. */
1303 static cgraph_node * create (tree decl);
1304
1305 /* Try to find a call graph node for declaration DECL and if it does not
1306 exist or if it corresponds to an inline clone, create a new one. */
1307 static cgraph_node * get_create (tree);
1308
1309 /* Return local info for the compiled function. */
1310 static cgraph_local_info *local_info (tree decl);
1311
1312 /* Return RTL info for the compiled function. */
1313 static struct cgraph_rtl_info *rtl_info (tree);
1314
1315 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1316 Return NULL if there's no such node. */
1317 static cgraph_node *get_for_asmname (tree asmname);
1318
1319 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1320 successful and NULL otherwise.
1321 Same body aliases are output whenever the body of DECL is output,
1322 and cgraph_node::get (ALIAS) transparently
1323 returns cgraph_node::get (DECL). */
1324 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1325
1326 /* Verify whole cgraph structure. */
1327 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1328
1329 /* Verify cgraph, if consistency checking is enabled. */
1330 static inline void checking_verify_cgraph_nodes (void);
1331
1332 /* Worker to bring NODE local. */
1333 static bool make_local (cgraph_node *node, void *);
1334
1335 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node representing
1336 the function the body is associated
1337 with (not necessarily cgraph_node (DECL)). */
1338 static cgraph_node *create_alias (tree alias, tree target);
1339
1340 /* Return true if NODE has thunk. */
1341 static bool has_thunk_p (cgraph_node *node, void *);
1342
1343 cgraph_edge *callees;
1344 cgraph_edge *callers;
1345 /* List of edges representing indirect calls with a yet undetermined
1346 callee. */
1347 cgraph_edge *indirect_calls;
1348 /* For nested functions points to function the node is nested in. */
1349 cgraph_node *origin;
1350 /* Points to first nested function, if any. */
1351 cgraph_node *nested;
1352 /* Pointer to the next function with same origin, if any. */
1353 cgraph_node *next_nested;
1354 /* Pointer to the next clone. */
1355 cgraph_node *next_sibling_clone;
1356 cgraph_node *prev_sibling_clone;
1357 cgraph_node *clones;
1358 cgraph_node *clone_of;
1359 /* If instrumentation_clone is 1 then instrumented_version points
1360 to the original function used to make instrumented version.
1361 Otherwise points to instrumented version of the function. */
1362 cgraph_node *instrumented_version;
1363 /* If instrumentation_clone is 1 then orig_decl is the original
1364 function declaration. */
1365 tree orig_decl;
1366 /* For functions with many call sites it holds a map from call expression
1367 to the edge to speed up the cgraph_edge function. */
1368 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1369 /* Declaration this node used to be a clone of. */
1370 tree former_clone_of;
1371
1372 /* If this is a SIMD clone, this points to the SIMD specific
1373 information for it. */
1374 cgraph_simd_clone *simdclone;
1375 /* If this function has SIMD clones, this points to the first clone. */
1376 cgraph_node *simd_clones;
1377
1378 /* Interprocedural passes scheduled to have their transform functions
1379 applied next time we execute local pass on them. We maintain it
1380 per-function in order to allow IPA passes to introduce new functions. */
1381 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1382
1383 cgraph_local_info local;
1384 cgraph_global_info global;
1385 struct cgraph_rtl_info *rtl;
1386 cgraph_clone_info clone;
1387 cgraph_thunk_info thunk;
1388
1389 /* Expected number of executions: calculated in profile.c. */
1390 profile_count count;
1391 /* How to scale counts at materialization time; used to merge
1392 LTO units with different number of profile runs. */
1393 int count_materialization_scale;
1394 /* Unique id of the node. */
1395 int uid;
1396 /* Summary unique id of the node. */
1397 int summary_uid;
1398 /* ID assigned by the profiling. */
1399 unsigned int profile_id;
1400 /* Time profiler: first run of function. */
1401 int tp_first_run;
1402
1403 /* Set when decl is an abstract function pointed to by the
1404 ABSTRACT_DECL_ORIGIN of a reachable function. */
1405 unsigned used_as_abstract_origin : 1;
1406 /* Set once the function is lowered (i.e. its CFG is built). */
1407 unsigned lowered : 1;
1408 /* Set once the function has been instantiated and its callee
1409 lists created. */
1410 unsigned process : 1;
1411 /* How commonly executed the node is. Initialized during branch
1412 probabilities pass. */
1413 ENUM_BITFIELD (node_frequency) frequency : 2;
1414 /* True when function can only be called at startup (from static ctor). */
1415 unsigned only_called_at_startup : 1;
1416 /* True when function can only be called at startup (from static dtor). */
1417 unsigned only_called_at_exit : 1;
1418 /* True when function is the transactional clone of a function which
1419 is called only from inside transactions. */
1420 /* ?? We should be able to remove this. We have enough bits in
1421 cgraph to calculate it. */
1422 unsigned tm_clone : 1;
1423 /* True if this decl is a dispatcher for function versions. */
1424 unsigned dispatcher_function : 1;
1425 /* True if this decl calls a COMDAT-local function. This is set up in
1426 compute_fn_summary and inline_call. */
1427 unsigned calls_comdat_local : 1;
1428 /* True if node has been created by merge operation in IPA-ICF. */
1429 unsigned icf_merged: 1;
1430 /* True when function is clone created for Pointer Bounds Checker
1431 instrumentation. */
1432 unsigned instrumentation_clone : 1;
1433 /* True if call to node can't result in a call to free, munmap or
1434 other operation that could make previously non-trapping memory
1435 accesses trapping. */
1436 unsigned nonfreeing_fn : 1;
1437 /* True if multiple COMDAT bodies were merged by lto-symtab. */
1438 unsigned merged_comdat : 1;
1439 /* True if function was created to be executed in parallel. */
1440 unsigned parallelized_function : 1;
1441 /* True if function is part split out by ipa-split. */
1442 unsigned split_part : 1;
1443 /* True if the function appears as possible target of indirect call. */
1444 unsigned indirect_call_target : 1;
1445
1446 private:
1447 /* Worker for call_for_symbol_and_aliases. */
1448 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1449 void *),
1450 void *data, bool include_overwritable);
1451 };
1452
1453 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1454 can appear in multiple sets. */
1455 struct cgraph_node_set_def
1456 {
1457 hash_map<cgraph_node *, size_t> *map;
1458 vec<cgraph_node *> nodes;
1459 };
1460
1461 typedef cgraph_node_set_def *cgraph_node_set;
1462 typedef struct varpool_node_set_def *varpool_node_set;
1463
1464 class varpool_node;
1465
1466 /* A varpool node set is a collection of varpool nodes. A varpool node
1467 can appear in multiple sets. */
1468 struct varpool_node_set_def
1469 {
1470 hash_map<varpool_node *, size_t> * map;
1471 vec<varpool_node *> nodes;
1472 };
1473
1474 /* Iterator structure for cgraph node sets. */
1475 struct cgraph_node_set_iterator
1476 {
1477 cgraph_node_set set;
1478 unsigned index;
1479 };
1480
1481 /* Iterator structure for varpool node sets. */
1482 struct varpool_node_set_iterator
1483 {
1484 varpool_node_set set;
1485 unsigned index;
1486 };
1487
1488 /* Context of polymorphic call. It represents information about the type of
1489 instance that may reach the call. This is used by ipa-devirt walkers of the
1490 type inheritance graph. */
1491
1492 class GTY(()) ipa_polymorphic_call_context {
1493 public:
1494 /* The called object appears in an object of type OUTER_TYPE
1495 at offset OFFSET. When information is not 100% reliable, we
1496 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1497 HOST_WIDE_INT offset;
1498 HOST_WIDE_INT speculative_offset;
1499 tree outer_type;
1500 tree speculative_outer_type;
1501 /* True if outer object may be in construction or destruction. */
1502 unsigned maybe_in_construction : 1;
1503 /* True if outer object may be of derived type. */
1504 unsigned maybe_derived_type : 1;
1505 /* True if speculative outer object may be of derived type. We always
1506 speculate that construction does not happen. */
1507 unsigned speculative_maybe_derived_type : 1;
1508 /* True if the context is invalid and all calls should be redirected
1509 to BUILT_IN_UNREACHABLE. */
1510 unsigned invalid : 1;
1511 /* True if the outer type is dynamic. */
1512 unsigned dynamic : 1;
1513
1514 /* Build empty "I know nothing" context. */
1515 ipa_polymorphic_call_context ();
1516 /* Build polymorphic call context for indirect call E. */
1517 ipa_polymorphic_call_context (cgraph_edge *e);
1518 /* Build polymorphic call context for IP invariant CST.
1519 If specified, OTR_TYPE specifies the type of polymorphic call
1520 that takes CST+OFFSET as a parameter. */
1521 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1522 HOST_WIDE_INT offset = 0);
1523 /* Build context for pointer REF contained in FNDECL at statement STMT.
1524 If INSTANCE is non-NULL, return pointer to the object described by
1525 the context. */
1526 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1527 tree *instance = NULL);
1528
1529 /* Look for vtable stores or constructor calls to work out dynamic type
1530 of memory location. */
1531 bool get_dynamic_type (tree, tree, tree, gimple *);
1532
1533 /* Make context non-speculative. */
1534 void clear_speculation ();
1535
1536 /* Produce context specifying all derived types of OTR_TYPE. If OTR_TYPE is
1537 NULL, the context is set to dummy "I know nothing" setting. */
1538 void clear_outer_type (tree otr_type = NULL);
1539
1540 /* Walk container types and modify context to point to actual class
1541 containing OTR_TYPE (if non-NULL) as base class.
1542 Return true if resulting context is valid.
1543
1544 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1545 valid only via allocation of a new polymorphic type inside of it by
1546 means of placement new.
1547
1548 When CONSIDER_BASES is false, only look for actual fields, not base types
1549 of TYPE. */
1550 bool restrict_to_inner_class (tree otr_type,
1551 bool consider_placement_new = true,
1552 bool consider_bases = true);
1553
1554 /* Adjust all offsets in contexts by given number of bits. */
1555 void offset_by (HOST_WIDE_INT);
1556 /* Use when we cannot track dynamic type change. This speculatively assumes
1557 that the type change is not happening. */
1558 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1559 /* Assume that both THIS and a given context are valid and strengthen THIS
1560 if possible. Return true if any strengthening was made.
1561 If the actual type the context is being used in is known, OTR_TYPE should
1562 be set accordingly. This improves the quality of the combined result. */
1563 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1564 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1565
1566 /* Return TRUE if context is fully useless. */
1567 bool useless_p () const;
1568 /* Return TRUE if this context conveys the same information as X. */
1569 bool equal_to (const ipa_polymorphic_call_context &x) const;
1570
1571 /* Dump human readable context to F. If NEWLINE is true, it will be
1572 terminated by a newline. */
1573 void dump (FILE *f, bool newline = true) const;
1574 void DEBUG_FUNCTION debug () const;
1575
1576 /* LTO streaming. */
1577 void stream_out (struct output_block *) const;
1578 void stream_in (struct lto_input_block *, struct data_in *data_in);
1579
1580 private:
1581 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1582 bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1583 void set_by_decl (tree, HOST_WIDE_INT);
1584 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1585 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1586 void make_speculative (tree otr_type = NULL);
1587 };
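
/* Illustrative usage sketch (editorial example, not part of the interface):
   building and inspecting a context for an indirect polymorphic call edge E,
   assumed to be a cgraph_edge * of a polymorphic call:

     ipa_polymorphic_call_context ctx (e);
     if (!ctx.useless_p ())
       ctx.dump (stderr);
*/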
1588
1589 /* Structure containing additional information about an indirect call. */
1590
1591 struct GTY(()) cgraph_indirect_call_info
1592 {
1593 /* When agg_contents is set, an offset where the call pointer is located
1594 within the aggregate. */
1595 HOST_WIDE_INT offset;
1596 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1597 ipa_polymorphic_call_context context;
1598 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1599 HOST_WIDE_INT otr_token;
1600 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1601 tree otr_type;
1602 /* Index of the parameter that is called. */
1603 int param_index;
1604 /* ECF flags determined from the caller. */
1605 int ecf_flags;
1606 /* Profile_id of common target obtained from profile. */
1607 int common_target_id;
1608 /* Probability that call will land in function with COMMON_TARGET_ID. */
1609 int common_target_probability;
1610
1611 /* Set when the call is a virtual call with the parameter being the
1612 associated object pointer rather than a simple direct call. */
1613 unsigned polymorphic : 1;
1614 /* Set when the call is a call of a pointer loaded from contents of an
1615 aggregate at offset. */
1616 unsigned agg_contents : 1;
1617 /* Set when this is a call through a member pointer. */
1618 unsigned member_ptr : 1;
1619 /* When the agg_contents bit is set, this one determines whether the
1620 destination is loaded from a parameter passed by reference. */
1621 unsigned by_ref : 1;
1622 /* When the agg_contents bit is set, this one determines whether we can
1623 deduce from the function body that the loaded value from the reference is
1624 never modified between the invocation of the function and the load
1625 point. */
1626 unsigned guaranteed_unmodified : 1;
1627 /* For polymorphic calls this specifies whether the virtual table pointer
1628 may have changed in between function entry and the call. */
1629 unsigned vptr_changed : 1;
1630 };
1631
1632 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1633 for_user)) cgraph_edge {
1634 friend class cgraph_node;
1635
1636 /* Remove the edge in the cgraph. */
1637 void remove (void);
1638
1639 /* Change field call_stmt of edge to NEW_STMT.
1640 If UPDATE_SPECULATIVE and E is any component of speculative
1641 edge, then update all components. */
1642 void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1643
1644 /* Redirect callee of the edge to N. The function does not update underlying
1645 call expression. */
1646 void redirect_callee (cgraph_node *n);
1647
1648 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1649 create one or more equivalent thunks for N and redirect E to the first in
1650 the chain. Note that it is then necessary to call
1651 n->expand_all_artificial_thunks once all callers are redirected. */
1652 void redirect_callee_duplicating_thunks (cgraph_node *n);
1653
1654 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1655 CALLEE. DELTA is an integer constant that is to be added to the this
1656 pointer (first parameter) to compensate for skipping
1657 a thunk adjustment. */
1658 cgraph_edge *make_direct (cgraph_node *callee);
1659
1660 /* Turn edge into speculative call calling N2. Update
1661 the profile so the direct call is taken DIRECT_COUNT times. */
1663 cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);
1664
1665 /* Given speculative call edge, return all three components. */
1666 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1667 ipa_ref *&reference);
1668
1669 /* Speculative call edge turned out to be direct call to CALLEE_DECL.
1670 Remove the speculative call sequence and return edge representing the call.
1671 It is up to caller to redirect the call as appropriate. */
1672 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1673
1674 /* If necessary, change the function declaration in the call statement
1675 associated with the edge so that it corresponds to the edge callee. */
1676 gimple *redirect_call_stmt_to_callee (void);
1677
1678 /* Create clone of edge in the node N represented
1679 by CALL_STMT in the callgraph. */
1680 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1681 profile_count num, profile_count den,
1682 bool update_original);
1683
1684 /* Verify edge count and frequency. */
1685 bool verify_count ();
1686
1687 /* Return true when call of edge can not lead to return from caller
1688 and thus it is safe to ignore its side effects for IPA analysis
1689 when computing side effects of the caller. */
1690 bool cannot_lead_to_return_p (void);
1691
1692 /* Return true when the edge represents a direct recursion. */
1693 bool recursive_p (void);
1694
1695 /* Return true if the call can be hot. */
1696 bool maybe_hot_p (void);
1697
1698 /* Rebuild cgraph edges for current function node. This needs to be run after
1699 passes that don't update the cgraph. */
1700 static unsigned int rebuild_edges (void);
1701
1702 /* Rebuild cgraph references for current function node. This needs to be run
1703 after passes that don't update the cgraph. */
1704 static void rebuild_references (void);
1705
1706 /* Expected number of executions: calculated in profile.c. */
1707 profile_count count;
1708 cgraph_node *caller;
1709 cgraph_node *callee;
1710 cgraph_edge *prev_caller;
1711 cgraph_edge *next_caller;
1712 cgraph_edge *prev_callee;
1713 cgraph_edge *next_callee;
1714 gcall *call_stmt;
1715 /* Additional information about an indirect call. Not cleared when an edge
1716 becomes direct. */
1717 cgraph_indirect_call_info *indirect_info;
1718 PTR GTY ((skip (""))) aux;
1719 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1720 explanation why function was not inlined. */
1721 enum cgraph_inline_failed_t inline_failed;
1722 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1723 when the function is serialized in. */
1724 unsigned int lto_stmt_uid;
1725 /* Unique id of the edge. */
1726 int uid;
1727 /* Whether this edge was made direct by indirect inlining. */
1728 unsigned int indirect_inlining_edge : 1;
1729 /* Whether this edge describes an indirect call with an undetermined
1730 callee. */
1731 unsigned int indirect_unknown_callee : 1;
1732 /* Whether this edge is still a dangling edge. */
1733 /* True if the corresponding CALL stmt cannot be inlined. */
1734 unsigned int call_stmt_cannot_inline_p : 1;
1735 /* Can this call throw externally? */
1736 unsigned int can_throw_external : 1;
1737 /* Edges with SPECULATIVE flag represent indirect calls that were
1738 speculatively turned into direct calls (i.e. by profile feedback).
1739 The final code sequence will have form:
1740
1741 if (call_target == expected_fn)
1742 expected_fn ();
1743 else
1744 call_target ();
1745
1746 Every speculative call is represented by three components attached
1747 to the same call statement:
1748 1) a direct call (to expected_fn)
1749 2) an indirect call (to call_target)
1750 3) an IPA_REF_ADDR reference to expected_fn.
1751
1752 Optimizers may later redirect the direct call to a clone, so 1) and 3)
1753 do not necessarily need to agree on the destination. */
1754 unsigned int speculative : 1;
1755 /* Set to true when caller is a constructor or destructor of polymorphic
1756 type. */
1757 unsigned in_polymorphic_cdtor : 1;
1758
1759 /* Return true if call must bind to current definition. */
1760 bool binds_to_current_def_p ();
1761
1762 /* Expected frequency of executions within the function.
1763 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1764 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1765 int frequency ();
1766
1767 /* Expected frequency of executions within the function. */
1768 sreal sreal_frequency ();
1769 private:
1770 /* Remove the edge from the list of the callers of the callee. */
1771 void remove_caller (void);
1772
1773 /* Remove the edge from the list of the callees of the caller. */
1774 void remove_callee (void);
1775
1776 /* Set callee N of call graph edge and add it to the corresponding set of
1777 callers. */
1778 void set_callee (cgraph_node *n);
1779
1780 /* Output flags of edge to a file F. */
1781 void dump_edge_flags (FILE *f);
1782
1783 /* Verify that call graph edge corresponds to DECL from the associated
1784 statement. Return true if the verification should fail. */
1785 bool verify_corresponds_to_fndecl (tree decl);
1786 };
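
/* Illustrative sketch (editorial example) of taking a speculative edge apart;
   E is assumed to be a cgraph_edge * with the speculative flag set:

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     e->speculative_call_info (direct, indirect, ref);
     // DIRECT is the call to expected_fn, INDIRECT the remaining indirect
     // call, and REF the IPA_REF_ADDR reference described above.
*/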
1787
1788 #define CGRAPH_FREQ_BASE 1000
1789 #define CGRAPH_FREQ_MAX 100000
1790
1791 /* The varpool data structure.
1792 Each static variable decl has an assigned varpool_node. */
1793
1794 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1795 public:
1796 /* Dump given varpool node to F. */
1797 void dump (FILE *f);
1798
1799 /* Dump given varpool node to stderr. */
1800 void DEBUG_FUNCTION debug (void);
1801
1802 /* Remove variable from symbol table. */
1803 void remove (void);
1804
1805 /* Remove node initializer when it is no longer needed. */
1806 void remove_initializer (void);
1807
1808 void analyze (void);
1809
1810 /* Return variable availability. */
1811 availability get_availability (symtab_node *ref = NULL);
1812
1813 /* When doing LTO, read variable's constructor from disk if
1814 it is not already present. */
1815 tree get_constructor (void);
1816
1817 /* Return true if variable has constructor that can be used for folding. */
1818 bool ctor_useable_for_folding_p (void);
1819
1820 /* For given variable pool node, walk the alias chain to return the variable
1821 it is an alias of. Do not walk through thunks.
1822 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1823 When REF is non-NULL, assume that reference happens in symbol REF
1824 when determining the availability. */
1825 inline varpool_node *ultimate_alias_target
1826 (availability *availability = NULL, symtab_node *ref = NULL);
1827
1828 /* Return node that alias is aliasing. */
1829 inline varpool_node *get_alias_target (void);
1830
1831 /* Output one variable, if necessary. Return whether we output it. */
1832 bool assemble_decl (void);
1833
1834 /* For variables in named sections make sure get_variable_section
1835 is called before we switch to those sections. Then conflicts between
1836 read-only sections and read-only sections requiring relocations can
1837 be resolved. */
1838 void finalize_named_section_flags (void);
1839
1840 /* Call callback on varpool symbol and aliases associated to varpool symbol.
1841 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1842 skipped. */
1843 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1844 void *data,
1845 bool include_overwritable);
1846
1847 /* Return true when variable should be considered externally visible. */
1848 bool externally_visible_p (void);
1849
1850 /* Return true when all references to variable must be visible
1851 in ipa_ref_list.
1852 i.e. if the variable is not externally visible or not used in some magic
1853 way (asm statement or such).
1854 The magic uses are all summarized in force_output flag. */
1855 inline bool all_refs_explicit_p ();
1856
1857 /* Return true when variable can be removed from variable pool
1858 if all references to it are eliminated. */
1859 inline bool can_remove_if_no_refs_p (void);
1860
1861 /* Add the variable DECL to the varpool.
1862 Unlike finalize_decl, this function is intended to be used
1863 by the middle end and allows insertion of new variables at an arbitrary
1864 point of compilation. */
1865 static void add (tree decl);
1866
1867 /* Return varpool node for given symbol and check it is a variable. */
1868 static inline varpool_node *get (const_tree decl);
1869
1870 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1871 instructs the middle end to output the variable to the asm file, if it is
1872 needed or externally visible. */
1873 static void finalize_decl (tree decl);
1874
1875 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1876 Extra name aliases are output whenever DECL is output. */
1877 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1878
1879 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1880 Extra name aliases are output whenever DECL is output. */
1881 static varpool_node * create_alias (tree, tree);
1882
1883 /* Dump the variable pool to F. */
1884 static void dump_varpool (FILE *f);
1885
1886 /* Dump the variable pool to stderr. */
1887 static void DEBUG_FUNCTION debug_varpool (void);
1888
1889 /* Allocate new varpool node and insert it into basic data structures. */
1890 static varpool_node *create_empty (void);
1891
1892 /* Return varpool node assigned to DECL. Create new one when needed. */
1893 static varpool_node *get_create (tree decl);
1894
1895 /* Given an assembler name, lookup node. */
1896 static varpool_node *get_for_asmname (tree asmname);
1897
1898 /* Set when variable is scheduled to be assembled. */
1899 unsigned output : 1;
1900
1901 /* Set when variable has statically initialized pointer
1902 or is a static bounds variable and needs initialization. */
1903 unsigned need_bounds_init : 1;
1904
1905 /* Set if the variable is dynamically initialized, except for
1906 function local statics. */
1907 unsigned dynamically_initialized : 1;
1908
1909 ENUM_BITFIELD(tls_model) tls_model : 3;
1910
1911 /* Set if the variable is known to be used by single function only.
1912 This is computed by the ipa_single_use pass and used by late optimizations
1913 in places where optimization would be valid for local static variable
1914 if we did not do any inter-procedural code movement. */
1915 unsigned used_by_single_function : 1;
1916
1917 private:
1918 /* Assemble thunks and aliases associated to varpool node. */
1919 void assemble_aliases (void);
1920
1921 /* Worker for call_for_symbol_and_aliases. */
1922 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1923 void *data,
1924 bool include_overwritable);
1925 };
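
/* Illustrative sketch (editorial example) of the typical flow for a static
   variable; DECL is assumed to be a VAR_DECL already built by a front end:

     varpool_node::finalize_decl (decl);
     varpool_node *vnode = varpool_node::get_create (decl);
     if (vnode->ctor_useable_for_folding_p ())
       {
         tree init = vnode->get_constructor ();
         // ... fold using INIT ...
       }
*/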
1926
1927 /* Every top level asm statement is put into an asm_node. */
1928
1929 struct GTY(()) asm_node {
1932 /* Next asm node. */
1933 asm_node *next;
1934 /* String for this asm node. */
1935 tree asm_str;
1936 /* Ordering of all cgraph nodes. */
1937 int order;
1938 };
1939
1940 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1941
1942 template <>
1943 template <>
1944 inline bool
1945 is_a_helper <cgraph_node *>::test (symtab_node *p)
1946 {
1947 return p && p->type == SYMTAB_FUNCTION;
1948 }
1949
1950 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1951
1952 template <>
1953 template <>
1954 inline bool
1955 is_a_helper <varpool_node *>::test (symtab_node *p)
1956 {
1957 return p && p->type == SYMTAB_VARIABLE;
1958 }
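
/* Illustrative sketch (editorial example) of dispatching on the symbol kind
   via the helpers above; N is assumed to be a symtab_node *:

     if (cgraph_node *cn = dyn_cast <cgraph_node *> (n))
       ;  // N is a function; use CN.
     else if (varpool_node *vn = dyn_cast <varpool_node *> (n))
       ;  // N is a variable; use VN.
*/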
1959
1960 /* Macros to access the next item in the list of free cgraph nodes and
1961 edges. */
1962 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1963 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1964 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1965
1966 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1967 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1968 typedef void (*varpool_node_hook)(varpool_node *, void *);
1969 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1970 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1971
1972 struct cgraph_edge_hook_list;
1973 struct cgraph_node_hook_list;
1974 struct varpool_node_hook_list;
1975 struct cgraph_2edge_hook_list;
1976 struct cgraph_2node_hook_list;
1977
1978 /* Map from a symbol to initialization/finalization priorities. */
1979 struct GTY(()) symbol_priority_map {
1980 priority_type init;
1981 priority_type fini;
1982 };
1983
1984 enum symtab_state
1985 {
1986 /* Frontend is parsing and finalizing functions. */
1987 PARSING,
1988 /* Callgraph is being constructed. It is safe to add new functions. */
1989 CONSTRUCTION,
1990 /* Callgraph is being streamed-in at LTO time. */
1991 LTO_STREAMING,
1992 /* Callgraph is built and early IPA passes are being run. */
1993 IPA,
1994 /* Callgraph is built and all functions are transformed to SSA form. */
1995 IPA_SSA,
1996 /* All inline decisions are done; it is now possible to remove extern inline
1997 functions and virtual call targets. */
1998 IPA_SSA_AFTER_INLINING,
1999 /* Functions are now ordered and being passed to RTL expanders. */
2000 EXPANSION,
2001 /* All cgraph expansion is done. */
2002 FINISHED
2003 };
2004
2005 struct asmname_hasher : ggc_ptr_hash <symtab_node>
2006 {
2007 typedef const_tree compare_type;
2008
2009 static hashval_t hash (symtab_node *n);
2010 static bool equal (symtab_node *n, const_tree t);
2011 };
2012
2013 class GTY((tag ("SYMTAB"))) symbol_table
2014 {
2015 public:
2016 friend class symtab_node;
2017 friend class cgraph_node;
2018 friend class cgraph_edge;
2019
2020 symbol_table (): cgraph_max_summary_uid (1)
2021 {
2022 }
2023
2024 /* Initialize callgraph dump file. */
2025 void initialize (void);
2026
2027 /* Register a top-level asm statement ASM_STR. */
2028 inline asm_node *finalize_toplevel_asm (tree asm_str);
2029
2030 /* Analyze the whole compilation unit once it is parsed completely. */
2031 void finalize_compilation_unit (void);
2032
2033 /* The C++ frontend produces same-body aliases all over the place, even before
2034 PCH gets streamed out. It relies on us linking the aliases with their
2035 function in order to do the fixups, but ipa-ref is not PCH safe.
2036 Consequently we first produce aliases without links, but once the C++ FE is
2037 sure it won't stream PCH we build the links via this function. */
2038 void process_same_body_aliases (void);
2039
2040 /* Perform simple optimizations based on callgraph. */
2041 void compile (void);
2042
2043 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
2044 functions into callgraph in a way so they look like ordinary reachable
2045 functions inserted into callgraph already at construction time. */
2046 void process_new_functions (void);
2047
2048 /* Once all functions from compilation unit are in memory, produce all clones
2049 and update all calls. We might also do this on demand if we don't want to
2050 bring all functions to memory prior to compilation, but the current WHOPR
2051 implementation does that and it is a bit easier to keep everything right
2052 in this order. */
2053 void materialize_all_clones (void);
2054
2055 /* Register a symbol NODE. */
2056 inline void register_symbol (symtab_node *node);
2057
2058 inline void
2059 clear_asm_symbols (void)
2060 {
2061 asmnodes = NULL;
2062 asm_last_node = NULL;
2063 }
2064
2065 /* Perform reachability analysis and reclaim all unreachable nodes. */
2066 bool remove_unreachable_nodes (FILE *file);
2067
2068 /* Optimization of function bodies might've rendered some variables as
2069 unnecessary, so we want to avoid compiling them. Re-do reachability
2070 starting from variables that are either externally visible or were
2071 referred to from the asm output routines. */
2072 void remove_unreferenced_decls (void);
2073
2074 /* Unregister a symbol NODE. */
2075 inline void unregister (symtab_node *node);
2076
2077 /* Allocate new callgraph node and insert it into basic data structures. */
2078 cgraph_node *create_empty (void);
2079
2080 /* Release a callgraph NODE with UID and put it into the list
2081 of free nodes. */
2082 void release_symbol (cgraph_node *node, int uid);
2083
2084 /* Output all variables enqueued to be assembled. */
2085 bool output_variables (void);
2086
2087 /* Weakrefs may be associated to external decls and thus not output
2088 at expansion time. Emit all necessary aliases. */
2089 void output_weakrefs (void);
2090
2091 /* Return first symbol in the symbol table. */
2092 inline symtab_node *first_symbol (void);
2093
2094 /* Return first assembler symbol. */
2095 inline asm_node *
2096 first_asm_symbol (void)
2097 {
2098 return asmnodes;
2099 }
2100
2101 /* Return first static symbol with definition. */
2102 inline symtab_node *first_defined_symbol (void);
2103
2104 /* Return first variable. */
2105 inline varpool_node *first_variable (void);
2106
2107 /* Return next variable after NODE. */
2108 inline varpool_node *next_variable (varpool_node *node);
2109
2110 /* Return first static variable with initializer. */
2111 inline varpool_node *first_static_initializer (void);
2112
2113 /* Return next static variable with initializer after NODE. */
2114 inline varpool_node *next_static_initializer (varpool_node *node);
2115
2116 /* Return first static variable with definition. */
2117 inline varpool_node *first_defined_variable (void);
2118
2119 /* Return next static variable with definition after NODE. */
2120 inline varpool_node *next_defined_variable (varpool_node *node);
2121
2122 /* Return first function with body defined. */
2123 inline cgraph_node *first_defined_function (void);
2124
2125 /* Return next function with body defined after NODE. */
2126 inline cgraph_node *next_defined_function (cgraph_node *node);
2127
2128 /* Return first function. */
2129 inline cgraph_node *first_function (void);
2130
2131 /* Return next function. */
2132 inline cgraph_node *next_function (cgraph_node *node);
2133
2134 /* Return first function with gimple body defined. */
2135 cgraph_node *first_function_with_gimple_body (void);
2136
2137 /* Return next function with gimple body defined after NODE. */
2138 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2139
2140 /* Register HOOK to be called with DATA on each removed edge. */
2141 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2142 void *data);
2143
2144 /* Remove ENTRY from the list of hooks called on removing edges. */
2145 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2146
2147 /* Register HOOK to be called with DATA on each removed node. */
2148 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2149 void *data);
2150
2151 /* Remove ENTRY from the list of hooks called on removing nodes. */
2152 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2153
2154 /* Register HOOK to be called with DATA on each removed node. */
2155 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2156 void *data);
2157
2158 /* Remove ENTRY from the list of hooks called on removing nodes. */
2159 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2160
2161 /* Register HOOK to be called with DATA on each inserted node. */
2162 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2163 void *data);
2164
2165 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2166 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2167
2168 /* Register HOOK to be called with DATA on each inserted node. */
2169 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2170 void *data);
2171
2172 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2173 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2174
2175 /* Register HOOK to be called with DATA on each duplicated edge. */
2176 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2177 void *data);
2178 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2179 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2180
2181 /* Register HOOK to be called with DATA on each duplicated node. */
2182 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2183 void *data);
2184
2185 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2186 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2187
2188 /* Call all edge removal hooks. */
2189 void call_edge_removal_hooks (cgraph_edge *e);
2190
2191 /* Call all node insertion hooks. */
2192 void call_cgraph_insertion_hooks (cgraph_node *node);
2193
2194 /* Call all node removal hooks. */
2195 void call_cgraph_removal_hooks (cgraph_node *node);
2196
2197 /* Call all node duplication hooks. */
2198 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2199
2200 /* Call all edge duplication hooks. */
2201 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2202
2203 /* Call all node removal hooks. */
2204 void call_varpool_removal_hooks (varpool_node *node);
2205
2206 /* Call all node insertion hooks. */
2207 void call_varpool_insertion_hooks (varpool_node *node);
2208
2209 /* Arrange node to be first in its entry of assembler_name_hash. */
2210 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2211
2212 /* Initialize asm name hash unless it is already initialized. */
2213 void symtab_initialize_asm_name_hash (void);
2214
2215 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2216 void change_decl_assembler_name (tree decl, tree name);
2217
2218 /* Dump symbol table to F. */
2219 void dump (FILE *f);
2220
2221 /* Dump symbol table to stderr. */
2222 inline DEBUG_FUNCTION void debug (void)
2223 {
2224 dump (stderr);
2225 }
2226
2227 /* Return true if assembler names NAME1 and NAME2 lead to the same symbol
2228 name. */
2229 static bool assembler_names_equal_p (const char *name1, const char *name2);
2230
2231 int cgraph_count;
2232 int cgraph_max_uid;
2233 int cgraph_max_summary_uid;
2234
2235 int edges_count;
2236 int edges_max_uid;
2237
2238 symtab_node* GTY(()) nodes;
2239 asm_node* GTY(()) asmnodes;
2240 asm_node* GTY(()) asm_last_node;
2241 cgraph_node* GTY(()) free_nodes;
2242
2243 /* Head of a linked list of unused (freed) call graph edges.
2244 Do not GTY((delete)) this list so UIDs get reliably recycled. */
2245 cgraph_edge * GTY(()) free_edges;
2246
2247 /* The order index of the next symtab node to be created. This is
2248 used so that we can sort the cgraph nodes in order by when we saw
2249 them, to support -fno-toplevel-reorder. */
2250 int order;
2251
2252 /* Set when whole unit has been analyzed so we can access global info. */
2253 bool global_info_ready;
2254 /* What state callgraph is in right now. */
2255 enum symtab_state state;
2256 /* Set when the cgraph is fully built and the basic flags are computed. */
2257 bool function_flags_ready;
2258
2259 bool cpp_implicit_aliases_done;
2260
2261 /* Hash table used to hold sections. */
2262 hash_table<section_name_hasher> *GTY(()) section_hash;
2263
2264 /* Hash table used to convert assembler names into nodes. */
2265 hash_table<asmname_hasher> *assembler_name_hash;
2266
2267 /* Hash table used to hold init priorities. */
2268 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2269
2270 FILE* GTY ((skip)) dump_file;
2271
2272 /* Return symbol used to separate symbol name from suffix. */
2273 static char symbol_suffix_separator ();
2274
2275 FILE* GTY ((skip)) ipa_clones_dump_file;
2276
2277 hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
2278
2279 private:
2280 /* Allocate new callgraph node. */
2281 inline cgraph_node * allocate_cgraph_symbol (void);
2282
2283 /* Allocate a cgraph_edge structure and fill it with data according to the
2284 parameters of which only CALLEE can be NULL (when creating an indirect call
2285 edge). */
2286 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2287 gcall *call_stmt, profile_count count,
2288 bool indir_unknown_callee);
2289
2290 /* Put the edge onto the free list. */
2291 void free_edge (cgraph_edge *e);
2292
2293 /* Insert NODE to assembler name hash. */
2294 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2295
2296 /* Remove NODE from assembler name hash. */
2297 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2298
2299 /* Hash asmnames ignoring the user specified marks. */
2300 static hashval_t decl_assembler_name_hash (const_tree asmname);
2301
2302 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2303 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2304
2305 friend struct asmname_hasher;
2306
2307 /* List of hooks triggered when an edge is removed. */
2308 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2309 /* List of hooks triggered when a cgraph node is removed. */
2310 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2311 /* List of hooks triggered when an edge is duplicated. */
2312 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2313 /* List of hooks triggered when a node is duplicated. */
2314 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2315 /* List of hooks triggered when a function is inserted. */
2316 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2317 /* List of hooks triggered when a variable is inserted. */
2318 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2319 /* List of hooks triggered when a node is removed. */
2320 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2321 };
2322
2323 extern GTY(()) symbol_table *symtab;
2324
2325 extern vec<cgraph_node *> cgraph_new_nodes;
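
/* Illustrative sketch (editorial example) of using the hook machinery
   declared in symbol_table above; my_removal_hook is a hypothetical
   callback, not something declared in this file:

     static void
     my_removal_hook (cgraph_node *node, void *data)
     {
       // ... forget any cached information about NODE ...
     }

     cgraph_node_hook_list *entry
       = symtab->add_cgraph_removal_hook (my_removal_hook, NULL);
     // ... later ...
     symtab->remove_cgraph_removal_hook (entry);
*/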
2326
2327 inline hashval_t
2328 asmname_hasher::hash (symtab_node *n)
2329 {
2330 return symbol_table::decl_assembler_name_hash
2331 (DECL_ASSEMBLER_NAME (n->decl));
2332 }
2333
2334 inline bool
2335 asmname_hasher::equal (symtab_node *n, const_tree t)
2336 {
2337 return symbol_table::decl_assembler_name_equal (n->decl, t);
2338 }
2339
2340 /* In cgraph.c */
2341 void cgraph_c_finalize (void);
2342 void release_function_body (tree);
2343 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2344
2345 void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2346 bool cgraph_function_possibly_inlined_p (tree);
2347
2348 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2349 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2350
2351 extern bool gimple_check_call_matching_types (gimple *, tree, bool);
2352
2353 /* In cgraphunit.c */
2354 void cgraphunit_c_finalize (void);
2355
2356 /* Initialize datastructures so DECL is a function in lowered gimple form.
2357 IN_SSA is true if the gimple is in SSA. */
2358 basic_block init_lowered_empty_function (tree, bool, profile_count);
2359
2360 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
2361 /* In cgraphclones.c */
2362
2363 tree clone_function_name_1 (const char *, const char *);
2364 tree clone_function_name (tree decl, const char *);
2365
2366 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2367 bool, bitmap, bool, bitmap, basic_block);
2368
2369 void dump_callgraph_transformation (const cgraph_node *original,
2370 const cgraph_node *clone,
2371 const char *suffix);
2372 tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
2373 bool skip_return);
2374
2375 /* In cgraphbuild.c */
2376 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2377 void record_references_in_initializer (tree, bool);
2378
2379 /* In ipa.c */
2380 void cgraph_build_static_cdtor (char which, tree body, int priority);
2381 bool ipa_discover_readonly_nonaddressable_vars (void);
2382
2383 /* In varpool.c */
2384 tree ctor_for_folding (tree);
2385
2386 /* In tree-chkp.c */
2387 extern bool chkp_function_instrumented_p (tree fndecl);
2388
2389 /* In ipa-inline-analysis.c */
2390 void initialize_inline_failed (struct cgraph_edge *);
2391 bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
2392
2393 /* Return true when the symbol is a real symbol, i.e. it is not an inline
2394 clone or an abstract function kept for debug info purposes only. */
2395 inline bool
2396 symtab_node::real_symbol_p (void)
2397 {
2398 cgraph_node *cnode;
2399
2400 if (DECL_ABSTRACT_P (decl))
2401 return false;
2402 if (transparent_alias && definition)
2403 return false;
2404 if (!is_a <cgraph_node *> (this))
2405 return true;
2406 cnode = dyn_cast <cgraph_node *> (this);
2407 if (cnode->global.inlined_to)
2408 return false;
2409 return true;
2410 }
2411
2412 /* Return true if DECL should have entry in symbol table if used.
2413 Those are functions and static & external variables. */
2414
2415 static inline bool
2416 decl_in_symtab_p (const_tree decl)
2417 {
2418 return (TREE_CODE (decl) == FUNCTION_DECL
2419 || (TREE_CODE (decl) == VAR_DECL
2420 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2421 }
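
/* Illustrative sketch (editorial example); DECL is assumed to be some
   declaration tree:

     if (decl_in_symtab_p (decl))
       {
         symtab_node *snode = symtab_node::get_create (decl);
         // ... work with SNODE ...
       }
*/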
2422
2423 inline bool
2424 symtab_node::in_same_comdat_group_p (symtab_node *target)
2425 {
2426 symtab_node *source = this;
2427
2428 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2429 {
2430 if (cn->global.inlined_to)
2431 source = cn->global.inlined_to;
2432 }
2433 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2434 {
2435 if (cn->global.inlined_to)
2436 target = cn->global.inlined_to;
2437 }
2438
2439 return source->get_comdat_group () == target->get_comdat_group ();
2440 }
2441
2442 /* Return node that alias is aliasing. */
2443
2444 inline symtab_node *
2445 symtab_node::get_alias_target (void)
2446 {
2447 ipa_ref *ref = NULL;
2448 iterate_reference (0, ref);
2449 if (ref->use == IPA_REF_CHKP)
2450 iterate_reference (1, ref);
2451 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2452 return ref->referred;
2453 }
2454
2455 /* Return next symbol with definition after this node. */
2456
2457 inline symtab_node *
2458 symtab_node::next_defined_symbol (void)
2459 {
2460 symtab_node *node1 = next;
2461
2462 for (; node1; node1 = node1->next)
2463 if (node1->definition)
2464 return node1;
2465
2466 return NULL;
2467 }
2468
2469 /* Iterates I-th reference in the list, REF is also set. */
2470
2471 inline ipa_ref *
2472 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2473 {
2474 vec_safe_iterate (ref_list.references, i, &ref);
2475
2476 return ref;
2477 }
2478
2479 /* Iterates I-th referring item in the list, REF is also set. */
2480
2481 inline ipa_ref *
2482 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2483 {
2484 ref_list.referring.iterate (i, &ref);
2485
2486 return ref;
2487 }
2488
2489 /* Iterates I-th referring alias item in the list, REF is also set. */
2490
2491 inline ipa_ref *
2492 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2493 {
2494 ref_list.referring.iterate (i, &ref);
2495
2496 if (ref && ref->use != IPA_REF_ALIAS)
2497 return NULL;
2498
2499 return ref;
2500 }
2501
2502 /* Return true if list contains an alias. */
2503
2504 inline bool
2505 symtab_node::has_aliases_p (void)
2506 {
2507 ipa_ref *ref = NULL;
2508
2509 return (iterate_direct_aliases (0, ref) != NULL);
2510 }
2511
2512 /* Return true when RESOLUTION indicate that linker will use
2513 the symbol from non-LTO object files. */
2514
2515 inline bool
2516 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2517 {
2518 return (resolution == LDPR_PREVAILING_DEF
2519 || resolution == LDPR_PREEMPTED_REG
2520 || resolution == LDPR_RESOLVED_EXEC
2521 || resolution == LDPR_RESOLVED_DYN);
2522 }
2523
2524 /* Return true when symtab_node is known to be used from other (non-LTO)
2525 object file. Known only when doing LTO via linker plugin. */
2526
2527 inline bool
2528 symtab_node::used_from_object_file_p (void)
2529 {
2530 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2531 return false;
2532 if (resolution_used_from_other_file_p (resolution))
2533 return true;
2534 return false;
2535 }
2536
2537 /* Return varpool node for given symbol and check it is a variable. */
2538
2539 inline varpool_node *
2540 varpool_node::get (const_tree decl)
2541 {
2542 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2543 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2544 }
2545
2546 /* Register a symbol NODE. */
2547
2548 inline void
2549 symbol_table::register_symbol (symtab_node *node)
2550 {
2551 node->next = nodes;
2552 node->previous = NULL;
2553
2554 if (nodes)
2555 nodes->previous = node;
2556 nodes = node;
2557
2558 node->order = order++;
2559 }
2560
2561 /* Register a top-level asm statement ASM_STR. */
2562
2563 asm_node *
2564 symbol_table::finalize_toplevel_asm (tree asm_str)
2565 {
2566 asm_node *node;
2567
2568 node = ggc_cleared_alloc<asm_node> ();
2569 node->asm_str = asm_str;
2570 node->order = order++;
2571 node->next = NULL;
2572
2573 if (asmnodes == NULL)
2574 asmnodes = node;
2575 else
2576 asm_last_node->next = node;
2577
2578 asm_last_node = node;
2579 return node;
2580 }
2581
2582 /* Unregister a symbol NODE. */
2583 inline void
2584 symbol_table::unregister (symtab_node *node)
2585 {
2586 if (node->previous)
2587 node->previous->next = node->next;
2588 else
2589 nodes = node->next;
2590
2591 if (node->next)
2592 node->next->previous = node->previous;
2593
2594 node->next = NULL;
2595 node->previous = NULL;
2596 }
2597
2598 /* Release a callgraph NODE with UID and put it into the list of free nodes. */
2599
2600 inline void
2601 symbol_table::release_symbol (cgraph_node *node, int uid)
2602 {
2603 cgraph_count--;
2604
2605 /* Clear out the node to NULL all pointers and add the node to the free
2606 list. */
2607 memset (node, 0, sizeof (*node));
2608 node->type = SYMTAB_FUNCTION;
2609 node->uid = uid;
2610 SET_NEXT_FREE_NODE (node, free_nodes);
2611 free_nodes = node;
2612 }
2613
2614 /* Allocate new callgraph node. */
2615
2616 inline cgraph_node *
2617 symbol_table::allocate_cgraph_symbol (void)
2618 {
2619 cgraph_node *node;
2620
2621 if (free_nodes)
2622 {
2623 node = free_nodes;
2624 free_nodes = NEXT_FREE_NODE (node);
2625 }
2626 else
2627 {
2628 node = ggc_cleared_alloc<cgraph_node> ();
2629 node->uid = cgraph_max_uid++;
2630 }
2631
2632 node->summary_uid = cgraph_max_summary_uid++;
2633 return node;
2634 }
2635
2636
2637 /* Return first symbol in the symbol table. */
2638 inline symtab_node *
2639 symbol_table::first_symbol (void)
2640 {
2641 return nodes;
2642 }
2643
2644 /* Walk all symbols. */
2645 #define FOR_EACH_SYMBOL(node) \
2646 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2647
2648 /* Return first static symbol with definition. */
2649 inline symtab_node *
2650 symbol_table::first_defined_symbol (void)
2651 {
2652 symtab_node *node;
2653
2654 for (node = nodes; node; node = node->next)
2655 if (node->definition)
2656 return node;
2657
2658 return NULL;
2659 }
2660
2661 /* Walk all symbols with definitions in current unit. */
2662 #define FOR_EACH_DEFINED_SYMBOL(node) \
2663 for ((node) = symtab->first_defined_symbol (); (node); \
2664 (node) = node->next_defined_symbol ())
2665
2666 /* Return first variable. */
2667 inline varpool_node *
2668 symbol_table::first_variable (void)
2669 {
2670 symtab_node *node;
2671 for (node = nodes; node; node = node->next)
2672 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2673 return vnode;
2674 return NULL;
2675 }
2676
2677 /* Return next variable after NODE. */
2678 inline varpool_node *
2679 symbol_table::next_variable (varpool_node *node)
2680 {
2681 symtab_node *node1 = node->next;
2682 for (; node1; node1 = node1->next)
2683 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2684 return vnode1;
2685 return NULL;
2686 }
2687 /* Walk all variables. */
2688 #define FOR_EACH_VARIABLE(node) \
2689 for ((node) = symtab->first_variable (); \
2690 (node); \
2691 (node) = symtab->next_variable ((node)))
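
/* Illustrative sketch (editorial example); dump every variable defined in
   the current unit:

     varpool_node *vnode;
     FOR_EACH_VARIABLE (vnode)
       if (vnode->definition)
         vnode->dump (stderr);
*/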
2692
2693 /* Return first static variable with initializer. */
2694 inline varpool_node *
2695 symbol_table::first_static_initializer (void)
2696 {
2697 symtab_node *node;
2698 for (node = nodes; node; node = node->next)
2699 {
2700 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2701 if (vnode && DECL_INITIAL (node->decl))
2702 return vnode;
2703 }
2704 return NULL;
2705 }
2706
2707 /* Return next static variable with initializer after NODE. */
2708 inline varpool_node *
2709 symbol_table::next_static_initializer (varpool_node *node)
2710 {
2711 symtab_node *node1 = node->next;
2712 for (; node1; node1 = node1->next)
2713 {
2714 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2715 if (vnode1 && DECL_INITIAL (node1->decl))
2716 return vnode1;
2717 }
2718 return NULL;
2719 }
2720
2721 /* Walk all static variables with initializer set. */
2722 #define FOR_EACH_STATIC_INITIALIZER(node) \
2723 for ((node) = symtab->first_static_initializer (); (node); \
2724 (node) = symtab->next_static_initializer (node))
2725
2726 /* Return first static variable with definition. */
2727 inline varpool_node *
2728 symbol_table::first_defined_variable (void)
2729 {
2730 symtab_node *node;
2731 for (node = nodes; node; node = node->next)
2732 {
2733 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2734 if (vnode && vnode->definition)
2735 return vnode;
2736 }
2737 return NULL;
2738 }
2739
2740 /* Return next static variable with definition after NODE. */
2741 inline varpool_node *
2742 symbol_table::next_defined_variable (varpool_node *node)
2743 {
2744 symtab_node *node1 = node->next;
2745 for (; node1; node1 = node1->next)
2746 {
2747 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2748 if (vnode1 && vnode1->definition)
2749 return vnode1;
2750 }
2751 return NULL;
2752 }
2753 /* Walk all variables with definitions in current unit. */
2754 #define FOR_EACH_DEFINED_VARIABLE(node) \
2755 for ((node) = symtab->first_defined_variable (); (node); \
2756 (node) = symtab->next_defined_variable (node))
2757
2758 /* Return first function with body defined. */
2759 inline cgraph_node *
2760 symbol_table::first_defined_function (void)
2761 {
2762 symtab_node *node;
2763 for (node = nodes; node; node = node->next)
2764 {
2765 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2766 if (cn && cn->definition)
2767 return cn;
2768 }
2769 return NULL;
2770 }
2771
2772 /* Return next function with body defined after NODE. */
2773 inline cgraph_node *
2774 symbol_table::next_defined_function (cgraph_node *node)
2775 {
2776 symtab_node *node1 = node->next;
2777 for (; node1; node1 = node1->next)
2778 {
2779 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2780 if (cn1 && cn1->definition)
2781 return cn1;
2782 }
2783 return NULL;
2784 }
2785
2786 /* Walk all functions with body defined. */
2787 #define FOR_EACH_DEFINED_FUNCTION(node) \
2788 for ((node) = symtab->first_defined_function (); (node); \
2789 (node) = symtab->next_defined_function ((node)))
2790
2791 /* Return first function. */
2792 inline cgraph_node *
2793 symbol_table::first_function (void)
2794 {
2795 symtab_node *node;
2796 for (node = nodes; node; node = node->next)
2797 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2798 return cn;
2799 return NULL;
2800 }
2801
2802 /* Return next function. */
2803 inline cgraph_node *
2804 symbol_table::next_function (cgraph_node *node)
2805 {
2806 symtab_node *node1 = node->next;
2807 for (; node1; node1 = node1->next)
2808 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2809 return cn1;
2810 return NULL;
2811 }
2812
2813 /* Return first function with gimple body defined. */
2814 inline cgraph_node *
2815 symbol_table::first_function_with_gimple_body (void)
2816 {
2817 symtab_node *node;
2818 for (node = nodes; node; node = node->next)
2819 {
2820 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2821 if (cn && cn->has_gimple_body_p ())
2822 return cn;
2823 }
2824 return NULL;
2825 }
2826
2827 /* Return next function with gimple body defined after NODE. */
2828 inline cgraph_node *
2829 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2830 {
2831 symtab_node *node1 = node->next;
2832 for (; node1; node1 = node1->next)
2833 {
2834 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2835 if (cn1 && cn1->has_gimple_body_p ())
2836 return cn1;
2837 }
2838 return NULL;
2839 }
2840
2841 /* Walk all functions. */
2842 #define FOR_EACH_FUNCTION(node) \
2843 for ((node) = symtab->first_function (); (node); \
2844 (node) = symtab->next_function ((node)))
2845
2846 /* Return true when callgraph node is a function with Gimple body defined
2847 in current unit. Functions can also be defined externally or they
2848 can be thunks with no Gimple representation.
2849
2850 Note that at WPA stage, the function body may not be present in memory. */
2851
2852 inline bool
2853 cgraph_node::has_gimple_body_p (void)
2854 {
2855 return definition && !thunk.thunk_p && !alias;
2856 }
2857
2858 /* Walk all functions with body defined. */
2859 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2860 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2861 (node) = symtab->next_function_with_gimple_body (node))
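
/* Illustrative sketch (editorial example); count the functions whose gimple
   body is available in the current unit:

     int n = 0;
     cgraph_node *node;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
       n++;
*/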
2862
2863 /* Uniquize all constants that appear in memory.
2864 Each constant in memory thus far output is recorded
2865 in `const_desc_table'. */
2866
2867 struct GTY((for_user)) constant_descriptor_tree {
2868 /* A MEM for the constant. */
2869 rtx rtl;
2870
2871 /* The value of the constant. */
2872 tree value;
2873
2874 /* Hash of value. Computing the hash from value each time
2875 hashfn is called can't work properly, as that means recursive
2876 use of the hash table during hash table expansion. */
2877 hashval_t hash;
2878 };
2879
2880 /* Return true when function is only called directly or via its aliases,
2881 i.e. it is not externally visible, its address was not taken and
2882 it is not used in any other non-standard way. */
2883
2884 inline bool
2885 cgraph_node::only_called_directly_or_aliased_p (void)
2886 {
2887 gcc_assert (!global.inlined_to);
2888 return (!force_output && !address_taken
2889 && !used_from_other_partition
2890 && !DECL_VIRTUAL_P (decl)
2891 && !DECL_STATIC_CONSTRUCTOR (decl)
2892 && !DECL_STATIC_DESTRUCTOR (decl)
2893 && !used_from_object_file_p ()
2894 && !externally_visible);
2895 }
2896
2897 /* Return true when function can be removed from callgraph
2898 if all direct calls are eliminated. */
2899
2900 inline bool
2901 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2902 {
2903 gcc_checking_assert (!global.inlined_to);
2904 /* Instrumentation clones should not be removed before
2905 instrumentation happens. New callers may appear after
2906 instrumentation. */
2907 if (instrumentation_clone
2908 && !chkp_function_instrumented_p (decl))
2909 return false;
2910 /* Extern inlines can always go, we will use the external definition. */
2911 if (DECL_EXTERNAL (decl))
2912 return true;
2913 /* When function is needed, we can not remove it. */
2914 if (force_output || used_from_other_partition)
2915 return false;
2916 if (DECL_STATIC_CONSTRUCTOR (decl)
2917 || DECL_STATIC_DESTRUCTOR (decl))
2918 return false;
2919 /* Only COMDAT functions can be removed if externally visible. */
2920 if (externally_visible
2921 && (!DECL_COMDAT (decl)
2922 || forced_by_abi
2923 || used_from_object_file_p ()))
2924 return false;
2925 return true;
2926 }
2927
2928 /* Verify cgraph, if consistency checking is enabled. */
2929
2930 inline void
2931 cgraph_node::checking_verify_cgraph_nodes (void)
2932 {
2933 if (flag_checking)
2934 cgraph_node::verify_cgraph_nodes ();
2935 }
2936
2937 /* Return true when variable can be removed from variable pool
2938 if all references to it are eliminated. */
2939
2940 inline bool
2941 varpool_node::can_remove_if_no_refs_p (void)
2942 {
2943 if (DECL_EXTERNAL (decl))
2944 return true;
2945 return (!force_output && !used_from_other_partition
2946 && ((DECL_COMDAT (decl)
2947 && !forced_by_abi
2948 && !used_from_object_file_p ())
2949 || !externally_visible
2950 || DECL_HAS_VALUE_EXPR_P (decl)));
2951 }
2952
2953 /* Return true when all references to variable must be visible in ipa_ref_list.
2954 i.e. if the variable is not externally visible or not used in some magic
2955 way (asm statement or such).
2956 The magic uses are all summarized in force_output flag. */
2957
2958 inline bool
2959 varpool_node::all_refs_explicit_p ()
2960 {
2961 return (definition
2962 && !externally_visible
2963 && !used_from_other_partition
2964 && !force_output);
2965 }
2966
2967 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2968 {
2969 static hashval_t hash (constant_descriptor_tree *);
2970 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2971 };
2972
2973 /* Constant pool accessor function. */
2974 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2975
2976 /* Return node that alias is aliasing. */
2977
2978 inline cgraph_node *
2979 cgraph_node::get_alias_target (void)
2980 {
2981 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2982 }
2983
2984 /* Return node that alias is aliasing. */
2985
2986 inline varpool_node *
2987 varpool_node::get_alias_target (void)
2988 {
2989 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2990 }
2991
2992 /* Walk the alias chain to return the symbol NODE is an alias of.
2993 If NODE is not an alias, return NODE.
2994 When AVAILABILITY is non-NULL, get minimal availability in the chain.
2995 When REF is non-NULL, assume that reference happens in symbol REF
2996 when determining the availability. */
2997
2998 inline symtab_node *
2999 symtab_node::ultimate_alias_target (enum availability *availability,
3000 symtab_node *ref)
3001 {
3002 if (!alias)
3003 {
3004 if (availability)
3005 *availability = get_availability (ref);
3006 return this;
3007 }
3008
3009 return ultimate_alias_target_1 (availability, ref);
3010 }
3011
3012 /* Given function symbol, walk the alias chain to return the function it
3013 is an alias of. Do not walk through thunks.
3014 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3015 When REF is non-NULL, assume that reference happens in symbol REF
3016 when determining the availability. */
3017
3018 inline cgraph_node *
3019 cgraph_node::ultimate_alias_target (enum availability *availability,
3020 symtab_node *ref)
3021 {
3022 cgraph_node *n = dyn_cast <cgraph_node *>
3023 (symtab_node::ultimate_alias_target (availability, ref));
3024 if (!n && availability)
3025 *availability = AVAIL_NOT_AVAILABLE;
3026 return n;
3027 }
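
/* Illustrative sketch (editorial example) of the usual availability pattern;
   NODE is assumed to be a cgraph_node *:

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (target && avail >= AVAIL_AVAILABLE)
       ;  // The body of TARGET is the one used in the final program.
*/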
3028
3029 /* For given variable pool node, walk the alias chain to return the variable
3030 it is an alias of. Do not walk through thunks.
3031 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3032 When REF is non-NULL, assume that reference happens in symbol REF
3033 when determining the availability. */
3034
3035 inline varpool_node *
3036 varpool_node::ultimate_alias_target (availability *availability,
3037 symtab_node *ref)
3038 {
3039 varpool_node *n = dyn_cast <varpool_node *>
3040 (symtab_node::ultimate_alias_target (availability, ref));
3041
3042 if (!n && availability)
3043 *availability = AVAIL_NOT_AVAILABLE;
3044 return n;
3045 }
3046
3047 /* Set the callee of this call graph edge to N and add the edge to the
3048 list of callers of N. */
3049
3050 inline void
3051 cgraph_edge::set_callee (cgraph_node *n)
3052 {
3053 prev_caller = NULL;
3054 if (n->callers)
3055 n->callers->prev_caller = this;
3056 next_caller = n->callers;
3057 n->callers = this;
3058 callee = n;
3059 }
3060
3061 /* Redirect the callee of the edge to N. The function does not update the
3062 underlying call expression. */
3063
3064 inline void
3065 cgraph_edge::redirect_callee (cgraph_node *n)
3066 {
3067 /* Remove from callers list of the current callee. */
3068 remove_callee ();
3069
3070 /* Insert to callers list of the new callee. */
3071 set_callee (n);
3072 }
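
/* Illustrative sketch: when e.g. devirtualization picks a new target
   NEW_CALLEE, the edge is repointed first and the call statement is updated
   separately; redirect_call_stmt_to_callee is assumed to be declared
   elsewhere in this header.

     e->redirect_callee (new_callee);
     e->redirect_call_stmt_to_callee ();  */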
3073
3074 /* Return true when the edge represents a direct recursion. */
3075
3076 inline bool
3077 cgraph_edge::recursive_p (void)
3078 {
3079 cgraph_node *c = callee->ultimate_alias_target ();
3080 if (caller->global.inlined_to)
3081 return caller->global.inlined_to->decl == c->decl;
3082 else
3083 return caller->decl == c->decl;
3084 }
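
/* Illustrative sketch: inlining heuristics use this kind of test to avoid
   inlining self-recursive calls without bound.  The CALLEES/NEXT_CALLEE edge
   list members are assumed to be declared elsewhere in this header.

     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       if (e->recursive_p ())
         ... limit or skip recursive inlining of E ...  */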
3085
3086 /* Remove the edge from the list of the callers of the callee. */
3087
3088 inline void
3089 cgraph_edge::remove_callee (void)
3090 {
3091 gcc_assert (!indirect_unknown_callee);
3092 if (prev_caller)
3093 prev_caller->next_caller = next_caller;
3094 if (next_caller)
3095 next_caller->prev_caller = prev_caller;
3096 if (!prev_caller)
3097 callee->callers = next_caller;
3098 }
3099
3100 /* Return true if call must bind to current definition. */
3101
3102 inline bool
3103 cgraph_edge::binds_to_current_def_p ()
3104 {
3105 if (callee)
3106 return callee->binds_to_current_def_p (caller);
3107 else
3108 return false;
3109 }
3110
3111 /* Expected frequency of executions within the function.
3112 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
3113 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
3114
3115 inline int
3116 cgraph_edge::frequency ()
3117 {
3118 return count.to_cgraph_frequency (caller->global.inlined_to
3119 ? caller->global.inlined_to->count
3120 : caller->count);
3121 }
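
/* Illustrative sketch: a heuristic can compare the result against
   CGRAPH_FREQ_BASE to see whether the call is expected to execute at least
   once per invocation of the caller.

     if (e->frequency () >= CGRAPH_FREQ_BASE)
       ... treat the call site as relatively hot within its caller ...  */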
3122
3123
3124 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
3125 static inline bool
3126 decl_is_tm_clone (const_tree fndecl)
3127 {
3128 cgraph_node *n = cgraph_node::get (fndecl);
3129 if (n)
3130 return n->tm_clone;
3131 return false;
3132 }
3133
3134 /* Likewise indicate that a node is needed, i.e. reachable via some
3135 external means. */
3136
3137 inline void
3138 cgraph_node::mark_force_output (void)
3139 {
3140 force_output = 1;
3141 gcc_checking_assert (!global.inlined_to);
3142 }
3143
3144 /* Return true if function should be optimized for size. */
3145
3146 inline bool
3147 cgraph_node::optimize_for_size_p (void)
3148 {
3149 if (opt_for_fn (decl, optimize_size))
3150 return true;
3151 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
3152 return true;
3153 else
3154 return false;
3155 }
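
/* Illustrative sketch: code generation heuristics are expected to branch on
   this predicate rather than testing optimize_size directly, so that
   per-function settings and the UNLIKELY_EXECUTED frequency class are
   honoured.

     if (node->optimize_for_size_p ())
       ... choose the smaller code sequence ...
     else
       ... choose the faster one ...  */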
3156
3157 /* Return symtab_node for NODE or create one if it is not present
3158 in symtab. */
3159
3160 inline symtab_node *
3161 symtab_node::get_create (tree node)
3162 {
3163 if (TREE_CODE (node) == VAR_DECL)
3164 return varpool_node::get_create (node);
3165 else
3166 return cgraph_node::get_create (node);
3167 }
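
/* Illustrative sketch: given a declaration DECL that may be either a function
   or a variable, one call finds or creates the matching symbol table entry.

     symtab_node *snode = symtab_node::get_create (decl);
     if (cgraph_node *cnode = dyn_cast <cgraph_node *> (snode))
       ... DECL is a function; use CNODE ...  */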
3168
3169 /* Return availability of NODE when referenced from REF. */
3170
3171 inline enum availability
3172 symtab_node::get_availability (symtab_node *ref)
3173 {
3174 if (is_a <cgraph_node *> (this))
3175 return dyn_cast <cgraph_node *> (this)->get_availability (ref);
3176 else
3177 return dyn_cast <varpool_node *> (this)->get_availability (ref);
3178 }
3179
3180 /* Call callback on the symtab node and the aliases associated with it.
3181 When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
3182
3183 inline bool
3184 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
3185 void *),
3186 void *data,
3187 bool include_overwritable)
3188 {
3189 if (include_overwritable
3190 || get_availability () > AVAIL_INTERPOSABLE)
3191 {
3192 if (callback (this, data))
3193 return true;
3194 }
3195 if (has_aliases_p ())
3196 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3197 return false;
3198 }
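
/* Illustrative sketch: the walker stops as soon as the callback returns true;
   arbitrary state is passed through the DATA pointer.  NONREMOVABLE_P below
   is a hypothetical callback, and FORCE_OUTPUT is assumed here to be the
   symtab flag used elsewhere in this header.

     static bool
     nonremovable_p (symtab_node *n, void *)
     {
       return n->force_output;
     }

     ...
     bool pinned
       = node->call_for_symbol_and_aliases (nonremovable_p, NULL, true);  */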
3199
3200 /* Call callback on the function and the aliases associated with it.
3201 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3202 skipped. */
3203
3204 inline bool
3205 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
3206 void *),
3207 void *data,
3208 bool include_overwritable)
3209 {
3210 if (include_overwritable
3211 || get_availability () > AVAIL_INTERPOSABLE)
3212 {
3213 if (callback (this, data))
3214 return true;
3215 }
3216 if (has_aliases_p ())
3217 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3218 return false;
3219 }
3220
3221 /* Call callback on the varpool symbol and the aliases associated with it.
3222 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3223 skipped. */
3224
3225 inline bool
3226 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3227 void *),
3228 void *data,
3229 bool include_overwritable)
3230 {
3231 if (include_overwritable
3232 || get_availability () > AVAIL_INTERPOSABLE)
3233 {
3234 if (callback (this, data))
3235 return true;
3236 }
3237 if (has_aliases_p ())
3238 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3239 return false;
3240 }
3241
3242 /* Return true if the reference may be used in an address compare. */
3243
3244 inline bool
3245 ipa_ref::address_matters_p ()
3246 {
3247 if (use != IPA_REF_ADDR)
3248 return false;
3249 /* Addresses taken from virtual tables are never compared. */
3250 if (is_a <varpool_node *> (referring)
3251 && DECL_VIRTUAL_P (referring->decl))
3252 return false;
3253 return referred->address_can_be_compared_p ();
3254 }
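
/* Illustrative sketch: identical-code/constant merging style passes can use
   this to decide whether folding two symbols together could change the
   outcome of an address comparison.

     ipa_ref *ref = ...;  obtained from the symbol's referring list
     if (ref->address_matters_p ())
       ... the address is significant; preserve the symbol's identity ...  */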
3255
3256 /* Build polymorphic call context for indirect call E. */
3257
3258 inline
3259 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3260 {
3261 gcc_checking_assert (e->indirect_info->polymorphic);
3262 *this = e->indirect_info->context;
3263 }
3264
3265 /* Build empty "I know nothing" context. */
3266
3267 inline
3268 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3269 {
3270 clear_speculation ();
3271 clear_outer_type ();
3272 invalid = false;
3273 }
3274
3275 /* Make context non-speculative. */
3276
3277 inline void
3278 ipa_polymorphic_call_context::clear_speculation ()
3279 {
3280 speculative_outer_type = NULL;
3281 speculative_offset = 0;
3282 speculative_maybe_derived_type = false;
3283 }
3284
3285 /* Produce a context specifying all derived types of OTR_TYPE. If OTR_TYPE is
3286 NULL, the context is set to the dummy "I know nothing" setting. */
3287
3288 inline void
3289 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3290 {
3291 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3292 offset = 0;
3293 maybe_derived_type = true;
3294 maybe_in_construction = true;
3295 dynamic = true;
3296 }
3297
3298 /* Adjust all offsets in the context by OFF bits. */
3299
3300 inline void
3301 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3302 {
3303 if (outer_type)
3304 offset += off;
3305 if (speculative_outer_type)
3306 speculative_offset += off;
3307 }
3308
3309 /* Return TRUE if context is fully useless. */
3310
3311 inline bool
3312 ipa_polymorphic_call_context::useless_p () const
3313 {
3314 return (!outer_type && !speculative_outer_type);
3315 }
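
/* Illustrative sketch tying the accessors above together: build a context
   from a polymorphic indirect call, drop speculation, shift the offsets when
   the base object is known to sit inside a larger one, and give up once no
   type information remains.  OUTER_OFF is a hypothetical offset in bits.

     ipa_polymorphic_call_context ctx (e);
     ctx.clear_speculation ();
     ctx.offset_by (outer_off);
     if (ctx.useless_p ())
       ... nothing useful to propagate for this call ...  */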
3316
3317 /* Return true if NODE is local. Instrumentation clones are counted as local
3318 only when original function is local. */
3319
3320 static inline bool
3321 cgraph_local_p (cgraph_node *node)
3322 {
3323 if (!node->instrumentation_clone || !node->instrumented_version)
3324 return node->local.local;
3325
3326 return node->local.local && node->instrumented_version->local.local;
3327 }
3328
3329 /* When using fprintf (or similar), problems can arise with
3330 transient generated strings. Many string-generation APIs
3331 only support one result being alive at once (e.g. by
3332 returning a pointer to a statically-allocated buffer).
3333
3334 If there is more than one generated string within one
3335 fprintf call, the first string gets evicted or overwritten
3336 by the second before fprintf is fully evaluated.
3337 See e.g. PR/53136.
3338
3339 This function provides a workaround for this, by providing
3340 a simple way to create copies of these transient strings,
3341 without the need to have explicit cleanup:
3342
3343 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3344 xstrdup_for_dump (EXPR_1),
3345 xstrdup_for_dump (EXPR_2));
3346
3347 This is actually a simple wrapper around ggc_strdup, but
3348 the name documents the intent. We require that no GC can occur
3349 within the fprintf call. */
3350
3351 static inline const char *
3352 xstrdup_for_dump (const char *transient_str)
3353 {
3354 return ggc_strdup (transient_str);
3355 }
3356
3357 #endif /* GCC_CGRAPH_H */