re PR c/79153 (-Wimplicit-fallthrough missed warning)
[gcc.git] / gcc / cgraph.h
1 /* Callgraph handling code.
2 Copyright (C) 2003-2017 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "profile-count.h"
25 #include "ipa-ref.h"
26 #include "plugin-api.h"
27
28 class ipa_opt_pass_d;
29 typedef ipa_opt_pass_d *ipa_opt_pass;
30
31 /* Symbol table consists of functions and variables.
32 TODO: add labels and CONST_DECLs. */
33 enum symtab_type
34 {
35 SYMTAB_SYMBOL,
36 SYMTAB_FUNCTION,
37 SYMTAB_VARIABLE
38 };
39
40 /* Section names are stored as reference counted strings in GGC safe hashtable
41 (to make them survive through PCH). */
42
43 struct GTY((for_user)) section_hash_entry
44 {
45 int ref_count;
46 char *name; /* As long as this datastructure stays in GGC, we can not put
47 string at the tail of structure of GGC dies in horrible
48 way */
49 };
50
51 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
52 {
53 typedef const char *compare_type;
54
55 static hashval_t hash (section_hash_entry *);
56 static bool equal (section_hash_entry *, const char *);
57 };
58
59 enum availability
60 {
61 /* Not yet set by cgraph_function_body_availability. */
62 AVAIL_UNSET,
63 /* Function body/variable initializer is unknown. */
64 AVAIL_NOT_AVAILABLE,
65 /* Function body/variable initializer is known but might be replaced
66 by a different one from other compilation unit and thus needs to
67 be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
68 arbitrary side effects on escaping variables and functions, while
69 like AVAILABLE it might access static variables. */
70 AVAIL_INTERPOSABLE,
71 /* Function body/variable initializer is known and will be used in final
72 program. */
73 AVAIL_AVAILABLE,
74 /* Function body/variable initializer is known and all it's uses are
75 explicitly visible within current unit (ie it's address is never taken and
76 it is not exported to other units). Currently used only for functions. */
77 AVAIL_LOCAL
78 };
79
80 /* Classification of symbols WRT partitioning. */
81 enum symbol_partitioning_class
82 {
83 /* External declarations are ignored by partitioning algorithms and they are
84 added into the boundary later via compute_ltrans_boundary. */
85 SYMBOL_EXTERNAL,
86 /* Partitioned symbols are pur into one of partitions. */
87 SYMBOL_PARTITION,
88 /* Duplicated symbols (such as comdat or constant pool references) are
89 copied into every node needing them via add_symbol_to_partition. */
90 SYMBOL_DUPLICATE
91 };
92
93 /* Base of all entries in the symbol table.
94 The symtab_node is inherited by cgraph and varpol nodes. */
95 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
96 chain_next ("%h.next"), chain_prev ("%h.previous")))
97 symtab_node
98 {
99 public:
100 /* Return name. */
101 const char *name () const;
102
103 /* Return dump name. */
104 const char *dump_name () const;
105
106 /* Return asm name. */
107 const char *asm_name () const;
108
109 /* Return dump name with assembler name. */
110 const char *dump_asm_name () const;
111
112 /* Add node into symbol table. This function is not used directly, but via
113 cgraph/varpool node creation routines. */
114 void register_symbol (void);
115
116 /* Remove symbol from symbol table. */
117 void remove (void);
118
119 /* Dump symtab node to F. */
120 void dump (FILE *f);
121
122 /* Dump symtab node to stderr. */
123 void DEBUG_FUNCTION debug (void);
124
125 /* Verify consistency of node. */
126 void DEBUG_FUNCTION verify (void);
127
128 /* Return ipa reference from this symtab_node to
129 REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
130 of the use and STMT the statement (if it exists). */
131 ipa_ref *create_reference (symtab_node *referred_node,
132 enum ipa_ref_use use_type);
133
134 /* Return ipa reference from this symtab_node to
135 REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
136 of the use and STMT the statement (if it exists). */
137 ipa_ref *create_reference (symtab_node *referred_node,
138 enum ipa_ref_use use_type, gimple *stmt);
139
140 /* If VAL is a reference to a function or a variable, add a reference from
141 this symtab_node to the corresponding symbol table node. Return the new
142 reference or NULL if none was created. */
143 ipa_ref *maybe_create_reference (tree val, gimple *stmt);
144
145 /* Clone all references from symtab NODE to this symtab_node. */
146 void clone_references (symtab_node *node);
147
148 /* Remove all stmt references in non-speculative references.
149 Those are not maintained during inlining & clonning.
150 The exception are speculative references that are updated along
151 with callgraph edges associated with them. */
152 void clone_referring (symtab_node *node);
153
154 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
155 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
156
157 /* Find the structure describing a reference to REFERRED_NODE
158 and associated with statement STMT. */
159 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
160 unsigned int lto_stmt_uid);
161
162 /* Remove all references that are associated with statement STMT. */
163 void remove_stmt_references (gimple *stmt);
164
165 /* Remove all stmt references in non-speculative references.
166 Those are not maintained during inlining & clonning.
167 The exception are speculative references that are updated along
168 with callgraph edges associated with them. */
169 void clear_stmts_in_references (void);
170
171 /* Remove all references in ref list. */
172 void remove_all_references (void);
173
174 /* Remove all referring items in ref list. */
175 void remove_all_referring (void);
176
177 /* Dump references in ref list to FILE. */
178 void dump_references (FILE *file);
179
180 /* Dump referring in list to FILE. */
181 void dump_referring (FILE *);
182
183 /* Get number of references for this node. */
184 inline unsigned num_references (void)
185 {
186 return ref_list.references ? ref_list.references->length () : 0;
187 }
188
189 /* Iterates I-th reference in the list, REF is also set. */
190 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
191
192 /* Iterates I-th referring item in the list, REF is also set. */
193 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
194
195 /* Iterates I-th referring alias item in the list, REF is also set. */
196 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
197
198 /* Return true if symtab node and TARGET represents
199 semantically equivalent symbols. */
200 bool semantically_equivalent_p (symtab_node *target);
201
202 /* Classify symbol symtab node for partitioning. */
203 enum symbol_partitioning_class get_partitioning_class (void);
204
205 /* Return comdat group. */
206 tree get_comdat_group ()
207 {
208 return x_comdat_group;
209 }
210
211 /* Return comdat group as identifier_node. */
212 tree get_comdat_group_id ()
213 {
214 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
215 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
216 return x_comdat_group;
217 }
218
219 /* Set comdat group. */
220 void set_comdat_group (tree group)
221 {
222 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
223 || DECL_P (group));
224 x_comdat_group = group;
225 }
226
227 /* Return section as string. */
228 const char * get_section ()
229 {
230 if (!x_section)
231 return NULL;
232 return x_section->name;
233 }
234
235 /* Remove node from same comdat group. */
236 void remove_from_same_comdat_group (void);
237
238 /* Add this symtab_node to the same comdat group that OLD is in. */
239 void add_to_same_comdat_group (symtab_node *old_node);
240
241 /* Dissolve the same_comdat_group list in which NODE resides. */
242 void dissolve_same_comdat_group_list (void);
243
244 /* Return true when symtab_node is known to be used from other (non-LTO)
245 object file. Known only when doing LTO via linker plugin. */
246 bool used_from_object_file_p (void);
247
248 /* Walk the alias chain to return the symbol NODE is alias of.
249 If NODE is not an alias, return NODE.
250 When AVAILABILITY is non-NULL, get minimal availability in the chain.
251 When REF is non-NULL, assume that reference happens in symbol REF
252 when determining the availability. */
253 symtab_node *ultimate_alias_target (enum availability *avail = NULL,
254 struct symtab_node *ref = NULL);
255
256 /* Return next reachable static symbol with initializer after NODE. */
257 inline symtab_node *next_defined_symbol (void);
258
259 /* Add reference recording that symtab node is alias of TARGET.
260 If TRANSPARENT is true make the alias to be transparent alias.
261 The function can fail in the case of aliasing cycles; in this case
262 it returns false. */
263 bool resolve_alias (symtab_node *target, bool transparent = false);
264
265 /* C++ FE sometimes change linkage flags after producing same
266 body aliases. */
267 void fixup_same_cpp_alias_visibility (symtab_node *target);
268
269 /* Call callback on symtab node and aliases associated to this node.
270 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
271 skipped. */
272 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
273 void *data,
274 bool include_overwrite);
275
276 /* If node can not be interposable by static or dynamic linker to point to
277 different definition, return this symbol. Otherwise look for alias with
278 such property and if none exists, introduce new one. */
279 symtab_node *noninterposable_alias (void);
280
281 /* Return node that alias is aliasing. */
282 inline symtab_node *get_alias_target (void);
283
284 /* Set section for symbol and its aliases. */
285 void set_section (const char *section);
286
287 /* Set section, do not recurse into aliases.
288 When one wants to change section of symbol and its aliases,
289 use set_section. */
290 void set_section_for_node (const char *section);
291
292 /* Set initialization priority to PRIORITY. */
293 void set_init_priority (priority_type priority);
294
295 /* Return the initialization priority. */
296 priority_type get_init_priority ();
297
298 /* Return availability of NODE when referenced from REF. */
299 enum availability get_availability (symtab_node *ref = NULL);
300
301 /* Return true if NODE binds to current definition in final executable
302 when referenced from REF. If REF is NULL return conservative value
303 for any reference. */
304 bool binds_to_current_def_p (symtab_node *ref = NULL);
305
306 /* Make DECL local. */
307 void make_decl_local (void);
308
309 /* Copy visibility from N. */
310 void copy_visibility_from (symtab_node *n);
311
312 /* Return desired alignment of the definition. This is NOT alignment useful
313 to access THIS, because THIS may be interposable and DECL_ALIGN should
314 be used instead. It however must be guaranteed when output definition
315 of THIS. */
316 unsigned int definition_alignment ();
317
318 /* Return true if alignment can be increased. */
319 bool can_increase_alignment_p ();
320
321 /* Increase alignment of symbol to ALIGN. */
322 void increase_alignment (unsigned int align);
323
324 /* Return true if list contains an alias. */
325 bool has_aliases_p (void);
326
327 /* Return true when the symbol is real symbol, i.e. it is not inline clone
328 or abstract function kept for debug info purposes only. */
329 bool real_symbol_p (void);
330
331 /* Determine if symbol declaration is needed. That is, visible to something
332 either outside this translation unit, something magic in the system
333 configury. This function is used just during symbol creation. */
334 bool needed_p (void);
335
336 /* Return true if this symbol is a function from the C frontend specified
337 directly in RTL form (with "__RTL"). */
338 bool native_rtl_p () const;
339
340 /* Return true when there are references to the node. */
341 bool referred_to_p (bool include_self = true);
342
343 /* Return true if symbol can be discarded by linker from the binary.
344 Assume that symbol is used (so there is no need to take into account
345 garbage collecting linkers)
346
347 This can happen for comdats, commons and weaks when they are previaled
348 by other definition at static linking time. */
349 inline bool
350 can_be_discarded_p (void)
351 {
352 return (DECL_EXTERNAL (decl)
353 || ((get_comdat_group ()
354 || DECL_COMMON (decl)
355 || (DECL_SECTION_NAME (decl) && DECL_WEAK (decl)))
356 && ((resolution != LDPR_PREVAILING_DEF
357 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP)
358 || flag_incremental_link)
359 && resolution != LDPR_PREVAILING_DEF_IRONLY));
360 }
361
362 /* Return true if NODE is local to a particular COMDAT group, and must not
363 be named from outside the COMDAT. This is used for C++ decloned
364 constructors. */
365 inline bool comdat_local_p (void)
366 {
367 return (same_comdat_group && !TREE_PUBLIC (decl));
368 }
369
370 /* Return true if ONE and TWO are part of the same COMDAT group. */
371 inline bool in_same_comdat_group_p (symtab_node *target);
372
373 /* Return true if symbol is known to be nonzero. */
374 bool nonzero_address ();
375
376 /* Return 0 if symbol is known to have different address than S2,
377 Return 1 if symbol is known to have same address as S2,
378 return 2 otherwise.
379
380 If MEMORY_ACCESSED is true, assume that both memory pointer to THIS
381 and S2 is going to be accessed. This eliminates the situations when
382 either THIS or S2 is NULL and is seful for comparing bases when deciding
383 about memory aliasing. */
384 int equal_address_to (symtab_node *s2, bool memory_accessed = false);
385
386 /* Return true if symbol's address may possibly be compared to other
387 symbol's address. */
388 bool address_matters_p ();
389
390 /* Return true if NODE's address can be compared. This use properties
391 of NODE only and does not look if the address is actually taken in
392 interesting way. For that use ADDRESS_MATTERS_P instead. */
393 bool address_can_be_compared_p (void);
394
395 /* Return symbol table node associated with DECL, if any,
396 and NULL otherwise. */
397 static inline symtab_node *get (const_tree decl)
398 {
399 /* Check that we are called for sane type of object - functions
400 and static or external variables. */
401 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
402 || (TREE_CODE (decl) == VAR_DECL
403 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
404 || in_lto_p)));
405 /* Check that the mapping is sane - perhaps this check can go away,
406 but at the moment frontends tends to corrupt the mapping by calling
407 memcpy/memset on the tree nodes. */
408 gcc_checking_assert (!decl->decl_with_vis.symtab_node
409 || decl->decl_with_vis.symtab_node->decl == decl);
410 return decl->decl_with_vis.symtab_node;
411 }
412
413 /* Try to find a symtab node for declaration DECL and if it does not
414 exist or if it corresponds to an inline clone, create a new one. */
415 static inline symtab_node * get_create (tree node);
416
417 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
418 Return NULL if there's no such node. */
419 static symtab_node *get_for_asmname (const_tree asmname);
420
421 /* Verify symbol table for internal consistency. */
422 static DEBUG_FUNCTION void verify_symtab_nodes (void);
423
424 /* Perform internal consistency checks, if they are enabled. */
425 static inline void checking_verify_symtab_nodes (void);
426
427 /* Type of the symbol. */
428 ENUM_BITFIELD (symtab_type) type : 8;
429
430 /* The symbols resolution. */
431 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
432
433 /*** Flags representing the symbol type. ***/
434
435 /* True when symbol corresponds to a definition in current unit.
436 set via finalize_function or finalize_decl */
437 unsigned definition : 1;
438 /* True when symbol is an alias.
439 Set by ssemble_alias. */
440 unsigned alias : 1;
441 /* When true the alias is translated into its target symbol either by GCC
442 or assembler (it also may just be a duplicate declaration of the same
443 linker name).
444
445 Currently transparent aliases come in three different flavors
446 - aliases having the same assembler name as their target (aka duplicated
447 declarations). In this case the assembler names compare via
448 assembler_names_equal_p and weakref is false
449 - aliases that are renamed at a time being output to final file
450 by varasm.c. For those DECL_ASSEMBLER_NAME have
451 IDENTIFIER_TRANSPARENT_ALIAS set and thus also their assembler
452 name must be unique.
453 Weakrefs belong to this cateogry when we target assembler without
454 .weakref directive.
455 - weakrefs that are renamed by assembler via .weakref directive.
456 In this case the alias may or may not be definition (depending if
457 target declaration was seen by the compiler), weakref is set.
458 Unless we are before renaming statics, assembler names are different.
459
460 Given that we now support duplicate declarations, the second option is
461 redundant and will be removed. */
462 unsigned transparent_alias : 1;
463 /* True when alias is a weakref. */
464 unsigned weakref : 1;
465 /* C++ frontend produce same body aliases and extra name aliases for
466 virtual functions and vtables that are obviously equivalent.
467 Those aliases are bit special, especially because C++ frontend
468 visibility code is so ugly it can not get them right at first time
469 and their visibility needs to be copied from their "masters" at
470 the end of parsing. */
471 unsigned cpp_implicit_alias : 1;
472 /* Set once the definition was analyzed. The list of references and
473 other properties are built during analysis. */
474 unsigned analyzed : 1;
475 /* Set for write-only variables. */
476 unsigned writeonly : 1;
477 /* Visibility of symbol was used for further optimization; do not
478 permit further changes. */
479 unsigned refuse_visibility_changes : 1;
480
481 /*** Visibility and linkage flags. ***/
482
483 /* Set when function is visible by other units. */
484 unsigned externally_visible : 1;
485 /* Don't reorder to other symbols having this set. */
486 unsigned no_reorder : 1;
487 /* The symbol will be assumed to be used in an invisible way (like
488 by an toplevel asm statement). */
489 unsigned force_output : 1;
490 /* Like FORCE_OUTPUT, but in the case it is ABI requiring the symbol to be
491 exported. Unlike FORCE_OUTPUT this flag gets cleared to symbols promoted
492 to static and it does not inhibit optimization. */
493 unsigned forced_by_abi : 1;
494 /* True when the name is known to be unique and thus it does not need mangling. */
495 unsigned unique_name : 1;
496 /* Specify whether the section was set by user or by
497 compiler via -ffunction-sections. */
498 unsigned implicit_section : 1;
499 /* True when body and other characteristics have been removed by
500 symtab_remove_unreachable_nodes. */
501 unsigned body_removed : 1;
502
503 /*** WHOPR Partitioning flags.
504 These flags are used at ltrans stage when only part of the callgraph is
505 available. ***/
506
507 /* Set when variable is used from other LTRANS partition. */
508 unsigned used_from_other_partition : 1;
509 /* Set when function is available in the other LTRANS partition.
510 During WPA output it is used to mark nodes that are present in
511 multiple partitions. */
512 unsigned in_other_partition : 1;
513
514
515
516 /*** other flags. ***/
517
518 /* Set when symbol has address taken. */
519 unsigned address_taken : 1;
520 /* Set when init priority is set. */
521 unsigned in_init_priority_hash : 1;
522
523 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
524 of offloading, for separate compilation for a different target. */
525 unsigned need_lto_streaming : 1;
526
527 /* Set when symbol can be streamed into bytecode for offloading. */
528 unsigned offloadable : 1;
529
530
531 /* Ordering of all symtab entries. */
532 int order;
533
534 /* Declaration representing the symbol. */
535 tree decl;
536
537 /* Linked list of symbol table entries starting with symtab_nodes. */
538 symtab_node *next;
539 symtab_node *previous;
540
541 /* Linked list of symbols with the same asm name. There may be multiple
542 entries for single symbol name during LTO, because symbols are renamed
543 only after partitioning.
544
545 Because inline clones are kept in the assembler name has, they also produce
546 duplicate entries.
547
548 There are also several long standing bugs where frontends and builtin
549 code produce duplicated decls. */
550 symtab_node *next_sharing_asm_name;
551 symtab_node *previous_sharing_asm_name;
552
553 /* Circular list of nodes in the same comdat group if non-NULL. */
554 symtab_node *same_comdat_group;
555
556 /* Vectors of referring and referenced entities. */
557 ipa_ref_list ref_list;
558
559 /* Alias target. May be either DECL pointer or ASSEMBLER_NAME pointer
560 depending to what was known to frontend on the creation time.
561 Once alias is resolved, this pointer become NULL. */
562 tree alias_target;
563
564 /* File stream where this node is being written to. */
565 struct lto_file_decl_data * lto_file_data;
566
567 PTR GTY ((skip)) aux;
568
569 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
570 tree x_comdat_group;
571
572 /* Section name. Again can be private, if allowed. */
573 section_hash_entry *x_section;
574
575 protected:
576 /* Dump base fields of symtab nodes to F. Not to be used directly. */
577 void dump_base (FILE *);
578
579 /* Verify common part of symtab node. */
580 bool DEBUG_FUNCTION verify_base (void);
581
582 /* Remove node from symbol table. This function is not used directly, but via
583 cgraph/varpool node removal routines. */
584 void unregister (void);
585
586 /* Return the initialization and finalization priority information for
587 DECL. If there is no previous priority information, a freshly
588 allocated structure is returned. */
589 struct symbol_priority_map *priority_info (void);
590
591 /* Worker for call_for_symbol_and_aliases_1. */
592 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
593 void *data,
594 bool include_overwrite);
595 private:
596 /* Worker for set_section. */
597 static bool set_section (symtab_node *n, void *s);
598
599 /* Worker for symtab_resolve_alias. */
600 static bool set_implicit_section (symtab_node *n, void *);
601
602 /* Worker searching noninterposable alias. */
603 static bool noninterposable_alias (symtab_node *node, void *data);
604
605 /* Worker for ultimate_alias_target. */
606 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
607 symtab_node *ref = NULL);
608
609 /* Get dump name with normal or assembly name. */
610 const char *get_dump_name (bool asm_name_p) const;
611 };
612
613 inline void
614 symtab_node::checking_verify_symtab_nodes (void)
615 {
616 if (flag_checking)
617 symtab_node::verify_symtab_nodes ();
618 }
619
620 /* Walk all aliases for NODE. */
621 #define FOR_EACH_ALIAS(node, alias) \
622 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
623
624 /* This is the information that is put into the cgraph local structure
625 to recover a function. */
626 struct lto_file_decl_data;
627
628 extern const char * const cgraph_availability_names[];
629 extern const char * const ld_plugin_symbol_resolution_names[];
630 extern const char * const tls_model_names[];
631
632 /* Sub-structure of cgraph_node. Holds information about thunk, used only for
633 same body aliases.
634
635 Thunks are basically wrappers around methods which are introduced in case
636 of multiple inheritance in order to adjust the value of the "this" pointer
637 or of the returned value.
638
639 In the case of this-adjusting thunks, each back-end can override the
640 can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
641 (with a tail call for instance) directly as assembly. For the default hook
642 or for the case where the can_output_mi_thunk hooks return false, the thunk
643 is gimplified and lowered using the regular machinery. */
644
645 struct GTY(()) cgraph_thunk_info {
646 /* Offset used to adjust "this". */
647 HOST_WIDE_INT fixed_offset;
648
649 /* Offset in the virtual table to get the offset to adjust "this". Valid iff
650 VIRTUAL_OFFSET_P is true. */
651 HOST_WIDE_INT virtual_value;
652
653 /* Thunk target, i.e. the method that this thunk wraps. Depending on the
654 TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
655 tree alias;
656
657 /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
658 thunk. */
659 bool this_adjusting;
660
661 /* If true, this thunk is what we call a virtual thunk. In this case:
662 * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
663 done, add to the result the offset found in the vtable at:
664 vptr + VIRTUAL_VALUE
665 * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
666 the virtual one. */
667 bool virtual_offset_p;
668
669 /* ??? True for special kind of thunks, seems related to instrumentation. */
670 bool add_pointer_bounds_args;
671
672 /* Set to true when alias node (the cgraph_node to which this struct belong)
673 is a thunk. Access to any other fields is invalid if this is false. */
674 bool thunk_p;
675 };
676
677 /* Information about the function collected locally.
678 Available after function is analyzed. */
679
680 struct GTY(()) cgraph_local_info {
681 /* Set when function is visible in current compilation unit only and
682 its address is never taken. */
683 unsigned local : 1;
684
685 /* False when there is something makes versioning impossible. */
686 unsigned versionable : 1;
687
688 /* False when function calling convention and signature can not be changed.
689 This is the case when __builtin_apply_args is used. */
690 unsigned can_change_signature : 1;
691
692 /* True when the function has been originally extern inline, but it is
693 redefined now. */
694 unsigned redefined_extern_inline : 1;
695
696 /* True if the function may enter serial irrevocable mode. */
697 unsigned tm_may_enter_irr : 1;
698 };
699
700 /* Information about the function that needs to be computed globally
701 once compilation is finished. Available only with -funit-at-a-time. */
702
703 struct GTY(()) cgraph_global_info {
704 /* For inline clones this points to the function they will be
705 inlined into. */
706 cgraph_node *inlined_to;
707 };
708
709 /* Represent which DECL tree (or reference to such tree)
710 will be replaced by another tree while versioning. */
711 struct GTY(()) ipa_replace_map
712 {
713 /* The tree that will be replaced. */
714 tree old_tree;
715 /* The new (replacing) tree. */
716 tree new_tree;
717 /* Parameter number to replace, when old_tree is NULL. */
718 int parm_num;
719 /* True when a substitution should be done, false otherwise. */
720 bool replace_p;
721 /* True when we replace a reference to old_tree. */
722 bool ref_p;
723 };
724
725 struct GTY(()) cgraph_clone_info
726 {
727 vec<ipa_replace_map *, va_gc> *tree_map;
728 bitmap args_to_skip;
729 bitmap combined_args_to_skip;
730 };
731
732 enum cgraph_simd_clone_arg_type
733 {
734 SIMD_CLONE_ARG_TYPE_VECTOR,
735 SIMD_CLONE_ARG_TYPE_UNIFORM,
736 /* These are only for integer/pointer arguments passed by value. */
737 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
738 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
739 /* These 6 are only for reference type arguments or arguments passed
740 by reference. */
741 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
742 SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP,
743 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
744 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP,
745 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
746 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP,
747 SIMD_CLONE_ARG_TYPE_MASK
748 };
749
750 /* Function arguments in the original function of a SIMD clone.
751 Supplementary data for `struct simd_clone'. */
752
753 struct GTY(()) cgraph_simd_clone_arg {
754 /* Original function argument as it originally existed in
755 DECL_ARGUMENTS. */
756 tree orig_arg;
757
758 /* orig_arg's function (or for extern functions type from
759 TYPE_ARG_TYPES). */
760 tree orig_type;
761
762 /* If argument is a vector, this holds the vector version of
763 orig_arg that after adjusting the argument types will live in
764 DECL_ARGUMENTS. Otherwise, this is NULL.
765
766 This basically holds:
767 vector(simdlen) __typeof__(orig_arg) new_arg. */
768 tree vector_arg;
769
770 /* vector_arg's type (or for extern functions new vector type. */
771 tree vector_type;
772
773 /* If argument is a vector, this holds the array where the simd
774 argument is held while executing the simd clone function. This
775 is a local variable in the cloned function. Its content is
776 copied from vector_arg upon entry to the clone.
777
778 This basically holds:
779 __typeof__(orig_arg) simd_array[simdlen]. */
780 tree simd_array;
781
782 /* A SIMD clone's argument can be either linear (constant or
783 variable), uniform, or vector. */
784 enum cgraph_simd_clone_arg_type arg_type;
785
786 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
787 the constant linear step, if arg_type is
788 SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of
789 the uniform argument holding the step, otherwise 0. */
790 HOST_WIDE_INT linear_step;
791
792 /* Variable alignment if available, otherwise 0. */
793 unsigned int alignment;
794 };
795
796 /* Specific data for a SIMD function clone. */
797
798 struct GTY(()) cgraph_simd_clone {
799 /* Number of words in the SIMD lane associated with this clone. */
800 unsigned int simdlen;
801
802 /* Number of annotated function arguments in `args'. This is
803 usually the number of named arguments in FNDECL. */
804 unsigned int nargs;
805
806 /* Max hardware vector size in bits for integral vectors. */
807 unsigned int vecsize_int;
808
809 /* Max hardware vector size in bits for floating point vectors. */
810 unsigned int vecsize_float;
811
812 /* Machine mode of the mask argument(s), if they are to be passed
813 as bitmasks in integer argument(s). VOIDmode if masks are passed
814 as vectors of characteristic type. */
815 machine_mode mask_mode;
816
817 /* The mangling character for a given vector size. This is used
818 to determine the ISA mangling bit as specified in the Intel
819 Vector ABI. */
820 unsigned char vecsize_mangle;
821
822 /* True if this is the masked, in-branch version of the clone,
823 otherwise false. */
824 unsigned int inbranch : 1;
825
826 /* True if this is a Cilk Plus variant. */
827 unsigned int cilk_elemental : 1;
828
829 /* Doubly linked list of SIMD clones. */
830 cgraph_node *prev_clone, *next_clone;
831
832 /* Original cgraph node the SIMD clones were created for. */
833 cgraph_node *origin;
834
835 /* Annotated function arguments for the original function. */
836 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
837 };
838
839 /* Function Multiversioning info. */
840 struct GTY((for_user)) cgraph_function_version_info {
841 /* The cgraph_node for which the function version info is stored. */
842 cgraph_node *this_node;
843 /* Chains all the semantically identical function versions. The
844 first function in this chain is the version_info node of the
845 default function. */
846 cgraph_function_version_info *prev;
847 /* If this version node corresponds to a dispatcher for function
848 versions, this points to the version info node of the default
849 function, the first node in the chain. */
850 cgraph_function_version_info *next;
851 /* If this node corresponds to a function version, this points
852 to the dispatcher function decl, which is the function that must
853 be called to execute the right function version at run-time.
854
855 If this cgraph node is a dispatcher (if dispatcher_function is
856 true, in the cgraph_node struct) for function versions, this
857 points to resolver function, which holds the function body of the
858 dispatcher. The dispatcher decl is an alias to the resolver
859 function decl. */
860 tree dispatcher_resolver;
861 };
862
863 #define DEFCIFCODE(code, type, string) CIF_ ## code,
864 /* Reasons for inlining failures. */
865
866 enum cgraph_inline_failed_t {
867 #include "cif-code.def"
868 CIF_N_REASONS
869 };
870
871 enum cgraph_inline_failed_type_t
872 {
873 CIF_FINAL_NORMAL = 0,
874 CIF_FINAL_ERROR
875 };
876
877 struct cgraph_edge;
878
879 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
880 {
881 typedef gimple *compare_type;
882
883 static hashval_t hash (cgraph_edge *);
884 static hashval_t hash (gimple *);
885 static bool equal (cgraph_edge *, gimple *);
886 };
887
888 /* The cgraph data structure.
889 Each function decl has assigned cgraph_node listing callees and callers. */
890
891 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
892 public:
893 /* Remove the node from cgraph and all inline clones inlined into it.
894 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
895 removed. This allows to call the function from outer loop walking clone
896 tree. */
897 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
898
899 /* Record all references from cgraph_node that are taken
900 in statement STMT. */
901 void record_stmt_references (gimple *stmt);
902
903 /* Like cgraph_set_call_stmt but walk the clone tree and update all
904 clones sharing the same function body.
905 When WHOLE_SPECULATIVE_EDGES is true, all three components of
906 speculative edge gets updated. Otherwise we update only direct
907 call. */
908 void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
909 bool update_speculative = true);
910
911 /* Walk the alias chain to return the function cgraph_node is alias of.
912 Walk through thunk, too.
913 When AVAILABILITY is non-NULL, get minimal availability in the chain.
914 When REF is non-NULL, assume that reference happens in symbol REF
915 when determining the availability. */
916 cgraph_node *function_symbol (enum availability *avail = NULL,
917 struct symtab_node *ref = NULL);
918
919 /* Walk the alias chain to return the function cgraph_node is alias of.
920 Walk through non virtual thunks, too. Thus we return either a function
921 or a virtual thunk node.
922 When AVAILABILITY is non-NULL, get minimal availability in the chain.
923 When REF is non-NULL, assume that reference happens in symbol REF
924 when determining the availability. */
925 cgraph_node *function_or_virtual_thunk_symbol
926 (enum availability *avail = NULL,
927 struct symtab_node *ref = NULL);
928
929 /* Create node representing clone of N executed COUNT times. Decrease
930 the execution counts from original node too.
931 The new clone will have decl set to DECL that may or may not be the same
932 as decl of N.
933
934 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
935 function's profile to reflect the fact that part of execution is handled
936 by node.
937 When CALL_DUPLICATOIN_HOOK is true, the ipa passes are acknowledged about
938 the new clone. Otherwise the caller is responsible for doing so later.
939
940 If the new node is being inlined into another one, NEW_INLINED_TO should be
941 the outline function the new one is (even indirectly) inlined to.
942 All hooks will see this in node's global.inlined_to, when invoked.
943 Can be NULL if the node is not inlined. SUFFIX is string that is appended
944 to the original name. */
945 cgraph_node *create_clone (tree decl, profile_count count,
946 bool update_original,
947 vec<cgraph_edge *> redirect_callers,
948 bool call_duplication_hook,
949 cgraph_node *new_inlined_to,
950 bitmap args_to_skip, const char *suffix = NULL);
951
952 /* Create callgraph node clone with new declaration. The actual body will
953 be copied later at compilation stage. */
954 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
955 vec<ipa_replace_map *, va_gc> *tree_map,
956 bitmap args_to_skip, const char * suffix);
957
958 /* cgraph node being removed from symbol table; see if its entry can be
959 replaced by other inline clone. */
960 cgraph_node *find_replacement (void);
961
962 /* Create a new cgraph node which is the new version of
963 callgraph node. REDIRECT_CALLERS holds the callers
964 edges which should be redirected to point to
965 NEW_VERSION. ALL the callees edges of the node
966 are cloned to the new version node. Return the new
967 version node.
968
969 If non-NULL BLOCK_TO_COPY determine what basic blocks
970 was copied to prevent duplications of calls that are dead
971 in the clone.
972
973 SUFFIX is string that is appended to the original name. */
974
975 cgraph_node *create_version_clone (tree new_decl,
976 vec<cgraph_edge *> redirect_callers,
977 bitmap bbs_to_copy,
978 const char *suffix = NULL);
979
980 /* Perform function versioning.
981 Function versioning includes copying of the tree and
982 a callgraph update (creating a new cgraph node and updating
983 its callees and callers).
984
985 REDIRECT_CALLERS varray includes the edges to be redirected
986 to the new version.
987
988 TREE_MAP is a mapping of tree nodes we want to replace with
989 new ones (according to results of prior analysis).
990
991 If non-NULL ARGS_TO_SKIP determine function parameters to remove
992 from new version.
993 If SKIP_RETURN is true, the new version will return void.
994 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
995 If non_NULL NEW_ENTRY determine new entry BB of the clone.
996
997 Return the new version's cgraph node. */
998 cgraph_node *create_version_clone_with_body
999 (vec<cgraph_edge *> redirect_callers,
1000 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
1001 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
1002 const char *clone_name);
1003
1004 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
1005 corresponding to cgraph_node. */
1006 cgraph_function_version_info *insert_new_function_version (void);
1007
1008 /* Get the cgraph_function_version_info node corresponding to node. */
1009 cgraph_function_version_info *function_version (void);
1010
1011 /* Discover all functions and variables that are trivially needed, analyze
1012 them as well as all functions and variables referred by them */
1013 void analyze (void);
1014
1015 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
1016 aliases DECL with an adjustments made into the first parameter.
1017 See comments in struct cgraph_thunk_info for detail on the parameters. */
1018 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
1019 HOST_WIDE_INT fixed_offset,
1020 HOST_WIDE_INT virtual_value,
1021 tree virtual_offset,
1022 tree real_alias);
1023
1024
1025 /* Return node that alias is aliasing. */
1026 inline cgraph_node *get_alias_target (void);
1027
1028 /* Given function symbol, walk the alias chain to return the function node
1029 is alias of. Do not walk through thunks.
1030 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1031 When REF is non-NULL, assume that reference happens in symbol REF
1032 when determining the availability. */
1033
1034 cgraph_node *ultimate_alias_target (availability *availability = NULL,
1035 symtab_node *ref = NULL);
1036
1037 /* Expand thunk NODE to gimple if possible.
1038 When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1039 no assembler is produced.
1040 When OUTPUT_ASM_THUNK is true, also produce assembler for
1041 thunks that are not lowered. */
1042 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
1043
1044 /* Call expand_thunk on all callers that are thunks and analyze those
1045 nodes that were expanded. */
1046 void expand_all_artificial_thunks ();
1047
1048 /* Assemble thunks and aliases associated to node. */
1049 void assemble_thunks_and_aliases (void);
1050
1051 /* Expand function specified by node. */
1052 void expand (void);
1053
1054 /* As an GCC extension we allow redefinition of the function. The
1055 semantics when both copies of bodies differ is not well defined.
1056 We replace the old body with new body so in unit at a time mode
1057 we always use new body, while in normal mode we may end up with
1058 old body inlined into some functions and new body expanded and
1059 inlined in others. */
1060 void reset (void);
1061
1062 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
1063 kind of wrapper method. */
1064 void create_wrapper (cgraph_node *target);
1065
1066 /* Verify cgraph nodes of the cgraph node. */
1067 void DEBUG_FUNCTION verify_node (void);
1068
1069 /* Remove function from symbol table. */
1070 void remove (void);
1071
1072 /* Dump call graph node to file F. */
1073 void dump (FILE *f);
1074
1075 /* Dump call graph node to stderr. */
1076 void DEBUG_FUNCTION debug (void);
1077
1078 /* When doing LTO, read cgraph_node's body from disk if it is not already
1079 present. */
1080 bool get_untransformed_body (void);
1081
1082 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
1083 if it is not already present. When some IPA transformations are scheduled,
1084 apply them. */
1085 bool get_body (void);
1086
1087 /* Release memory used to represent body of function.
1088 Use this only for functions that are released before being translated to
1089 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1090 are free'd in final.c via free_after_compilation(). */
1091 void release_body (bool keep_arguments = false);
1092
1093 /* Return the DECL_STRUCT_FUNCTION of the function. */
1094 struct function *get_fun (void);
1095
1096 /* cgraph_node is no longer nested function; update cgraph accordingly. */
1097 void unnest (void);
1098
1099 /* Bring cgraph node local. */
1100 void make_local (void);
1101
1102 /* Likewise indicate that a node is having address taken. */
1103 void mark_address_taken (void);
1104
1105 /* Set fialization priority to PRIORITY. */
1106 void set_fini_priority (priority_type priority);
1107
1108 /* Return the finalization priority. */
1109 priority_type get_fini_priority (void);
1110
1111 /* Create edge from a given function to CALLEE in the cgraph. */
1112 cgraph_edge *create_edge (cgraph_node *callee,
1113 gcall *call_stmt, profile_count count);
1114
1115 /* Create an indirect edge with a yet-undetermined callee where the call
1116 statement destination is a formal parameter of the caller with index
1117 PARAM_INDEX. */
1118 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1119 profile_count count,
1120 bool compute_indirect_info = true);
1121
1122 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
1123 same function body. If clones already have edge for OLD_STMT; only
1124 update the edge same way as cgraph_set_call_stmt_including_clones does. */
1125 void create_edge_including_clones (cgraph_node *callee,
1126 gimple *old_stmt, gcall *stmt,
1127 profile_count count,
1128 cgraph_inline_failed_t reason);
1129
1130 /* Return the callgraph edge representing the GIMPLE_CALL statement
1131 CALL_STMT. */
1132 cgraph_edge *get_edge (gimple *call_stmt);
1133
1134 /* Collect all callers of cgraph_node and its aliases that are known to lead
1135 to NODE (i.e. are not overwritable) and that are not thunks. */
1136 vec<cgraph_edge *> collect_callers (void);
1137
1138 /* Remove all callers from the node. */
1139 void remove_callers (void);
1140
1141 /* Remove all callees from the node. */
1142 void remove_callees (void);
1143
1144 /* Return function availability. See cgraph.h for description of individual
1145 return values. */
1146 enum availability get_availability (symtab_node *ref = NULL);
1147
1148 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1149 if any to NOTHROW. */
1150 bool set_nothrow_flag (bool nothrow);
1151
1152 /* SET DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
1153 if any. */
1154 bool set_malloc_flag (bool malloc_p);
1155
1156 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
1157 If SET_CONST if false, clear the flag.
1158
1159 When setting the flag be careful about possible interposition and
1160 do not set the flag for functions that can be interposet and set pure
1161 flag for functions that can bind to other definition.
1162
1163 Return true if any change was done. */
1164
1165 bool set_const_flag (bool set_const, bool looping);
1166
1167 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1168 if any to PURE.
1169
1170 When setting the flag, be careful about possible interposition.
1171 Return true if any change was done. */
1172
1173 bool set_pure_flag (bool pure, bool looping);
1174
1175 /* Call callback on function and aliases associated to the function.
1176 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1177 skipped. */
1178
1179 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1180 void *),
1181 void *data, bool include_overwritable);
1182
1183 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1184 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1185 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1186 skipped. */
1187 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1188 void *data),
1189 void *data,
1190 bool include_overwritable,
1191 bool exclude_virtual_thunks = false);
1192
1193 /* Likewise indicate that a node is needed, i.e. reachable via some
1194 external means. */
1195 inline void mark_force_output (void);
1196
1197 /* Return true when function can be marked local. */
1198 bool local_p (void);
1199
1200 /* Return true if cgraph_node can be made local for API change.
1201 Extern inline functions and C++ COMDAT functions can be made local
1202 at the expense of possible code size growth if function is used in multiple
1203 compilation units. */
1204 bool can_be_local_p (void);
1205
1206 /* Return true when cgraph_node can not return or throw and thus
1207 it is safe to ignore its side effects for IPA analysis. */
1208 bool cannot_return_p (void);
1209
1210 /* Return true when function cgraph_node and all its aliases are only called
1211 directly.
1212 i.e. it is not externally visible, address was not taken and
1213 it is not used in any other non-standard way. */
1214 bool only_called_directly_p (void);
1215
1216 /* Return true when function is only called directly or it has alias.
1217 i.e. it is not externally visible, address was not taken and
1218 it is not used in any other non-standard way. */
1219 inline bool only_called_directly_or_aliased_p (void);
1220
1221 /* Return true when function cgraph_node can be expected to be removed
1222 from program when direct calls in this compilation unit are removed.
1223
1224 As a special case COMDAT functions are
1225 cgraph_can_remove_if_no_direct_calls_p while the are not
1226 cgraph_only_called_directly_p (it is possible they are called from other
1227 unit)
1228
1229 This function behaves as cgraph_only_called_directly_p because eliminating
1230 all uses of COMDAT function does not make it necessarily disappear from
1231 the program unless we are compiling whole program or we do LTO. In this
1232 case we know we win since dynamic linking will not really discard the
1233 linkonce section.
1234
1235 If WILL_INLINE is true, assume that function will be inlined into all the
1236 direct calls. */
1237 bool will_be_removed_from_program_if_no_direct_calls_p
1238 (bool will_inline = false);
1239
1240 /* Return true when function can be removed from callgraph
1241 if all direct calls and references are eliminated. The function does
1242 not take into account comdat groups. */
1243 bool can_remove_if_no_direct_calls_and_refs_p (void);
1244
1245 /* Return true when function cgraph_node and its aliases can be removed from
1246 callgraph if all direct calls are eliminated.
1247 If WILL_INLINE is true, assume that function will be inlined into all the
1248 direct calls. */
1249 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1250
1251 /* Return true when callgraph node is a function with Gimple body defined
1252 in current unit. Functions can also be define externally or they
1253 can be thunks with no Gimple representation.
1254
1255 Note that at WPA stage, the function body may not be present in memory. */
1256 inline bool has_gimple_body_p (void);
1257
1258 /* Return true if function should be optimized for size. */
1259 bool optimize_for_size_p (void);
1260
1261 /* Dump the callgraph to file F. */
1262 static void dump_cgraph (FILE *f);
1263
1264 /* Dump the call graph to stderr. */
1265 static inline
1266 void debug_cgraph (void)
1267 {
1268 dump_cgraph (stderr);
1269 }
1270
1271 /* Record that DECL1 and DECL2 are semantically identical function
1272 versions. */
1273 static void record_function_versions (tree decl1, tree decl2);
1274
1275 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1276 DECL is a duplicate declaration. */
1277 static void delete_function_version_by_decl (tree decl);
1278
1279 /* Add the function FNDECL to the call graph.
1280 Unlike finalize_function, this function is intended to be used
1281 by middle end and allows insertion of new function at arbitrary point
1282 of compilation. The function can be either in high, low or SSA form
1283 GIMPLE.
1284
1285 The function is assumed to be reachable and have address taken (so no
1286 API breaking optimizations are performed on it).
1287
1288 Main work done by this function is to enqueue the function for later
1289 processing to avoid need the passes to be re-entrant. */
1290 static void add_new_function (tree fndecl, bool lowered);
1291
1292 /* Return callgraph node for given symbol and check it is a function. */
1293 static inline cgraph_node *get (const_tree decl)
1294 {
1295 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1296 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1297 }
1298
1299 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1300 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1301 have the garbage collector run at the moment. We would need to either
1302 create a new GC context, or just not compile right now. */
1303 static void finalize_function (tree, bool);
1304
1305 /* Return cgraph node assigned to DECL. Create new one when needed. */
1306 static cgraph_node * create (tree decl);
1307
1308 /* Try to find a call graph node for declaration DECL and if it does not
1309 exist or if it corresponds to an inline clone, create a new one. */
1310 static cgraph_node * get_create (tree);
1311
1312 /* Return local info for the compiled function. */
1313 static cgraph_local_info *local_info (tree decl);
1314
1315 /* Return local info for the compiled function. */
1316 static struct cgraph_rtl_info *rtl_info (tree);
1317
1318 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1319 Return NULL if there's no such node. */
1320 static cgraph_node *get_for_asmname (tree asmname);
1321
1322 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1323 successful and NULL otherwise.
1324 Same body aliases are output whenever the body of DECL is output,
1325 and cgraph_node::get (ALIAS) transparently
1326 returns cgraph_node::get (DECL). */
1327 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1328
1329 /* Verify whole cgraph structure. */
1330 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1331
1332 /* Verify cgraph, if consistency checking is enabled. */
1333 static inline void checking_verify_cgraph_nodes (void);
1334
1335 /* Worker to bring NODE local. */
1336 static bool make_local (cgraph_node *node, void *);
1337
1338 /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
1339 the function body is associated
1340 with (not necessarily cgraph_node (DECL). */
1341 static cgraph_node *create_alias (tree alias, tree target);
1342
1343 /* Return true if NODE has thunk. */
1344 static bool has_thunk_p (cgraph_node *node, void *);
1345
1346 cgraph_edge *callees;
1347 cgraph_edge *callers;
1348 /* List of edges representing indirect calls with a yet undetermined
1349 callee. */
1350 cgraph_edge *indirect_calls;
1351 /* For nested functions points to function the node is nested in. */
1352 cgraph_node *origin;
1353 /* Points to first nested function, if any. */
1354 cgraph_node *nested;
1355 /* Pointer to the next function with same origin, if any. */
1356 cgraph_node *next_nested;
1357 /* Pointer to the next clone. */
1358 cgraph_node *next_sibling_clone;
1359 cgraph_node *prev_sibling_clone;
1360 cgraph_node *clones;
1361 cgraph_node *clone_of;
1362 /* If instrumentation_clone is 1 then instrumented_version points
1363 to the original function used to make instrumented version.
1364 Otherwise points to instrumented version of the function. */
1365 cgraph_node *instrumented_version;
1366 /* If instrumentation_clone is 1 then orig_decl is the original
1367 function declaration. */
1368 tree orig_decl;
1369 /* For functions with many calls sites it holds map from call expression
1370 to the edge to speed up cgraph_edge function. */
1371 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1372 /* Declaration node used to be clone of. */
1373 tree former_clone_of;
1374
1375 /* If this is a SIMD clone, this points to the SIMD specific
1376 information for it. */
1377 cgraph_simd_clone *simdclone;
1378 /* If this function has SIMD clones, this points to the first clone. */
1379 cgraph_node *simd_clones;
1380
1381 /* Interprocedural passes scheduled to have their transform functions
1382 applied next time we execute local pass on them. We maintain it
1383 per-function in order to allow IPA passes to introduce new functions. */
1384 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1385
1386 cgraph_local_info local;
1387 cgraph_global_info global;
1388 struct cgraph_rtl_info *rtl;
1389 cgraph_clone_info clone;
1390 cgraph_thunk_info thunk;
1391
1392 /* Expected number of executions: calculated in profile.c. */
1393 profile_count count;
1394 /* How to scale counts at materialization time; used to merge
1395 LTO units with different number of profile runs. */
1396 int count_materialization_scale;
1397 /* Unique id of the node. */
1398 int uid;
1399 /* Summary unique id of the node. */
1400 int summary_uid;
1401 /* ID assigned by the profiling. */
1402 unsigned int profile_id;
1403 /* Time profiler: first run of function. */
1404 int tp_first_run;
1405
1406 /* Set when decl is an abstract function pointed to by the
1407 ABSTRACT_DECL_ORIGIN of a reachable function. */
1408 unsigned used_as_abstract_origin : 1;
1409 /* Set once the function is lowered (i.e. its CFG is built). */
1410 unsigned lowered : 1;
1411 /* Set once the function has been instantiated and its callee
1412 lists created. */
1413 unsigned process : 1;
1414 /* How commonly executed the node is. Initialized during branch
1415 probabilities pass. */
1416 ENUM_BITFIELD (node_frequency) frequency : 2;
1417 /* True when function can only be called at startup (from static ctor). */
1418 unsigned only_called_at_startup : 1;
1419 /* True when function can only be called at startup (from static dtor). */
1420 unsigned only_called_at_exit : 1;
1421 /* True when function is the transactional clone of a function which
1422 is called only from inside transactions. */
1423 /* ?? We should be able to remove this. We have enough bits in
1424 cgraph to calculate it. */
1425 unsigned tm_clone : 1;
1426 /* True if this decl is a dispatcher for function versions. */
1427 unsigned dispatcher_function : 1;
1428 /* True if this decl calls a COMDAT-local function. This is set up in
1429 compute_fn_summary and inline_call. */
1430 unsigned calls_comdat_local : 1;
1431 /* True if node has been created by merge operation in IPA-ICF. */
1432 unsigned icf_merged: 1;
1433 /* True when function is clone created for Pointer Bounds Checker
1434 instrumentation. */
1435 unsigned instrumentation_clone : 1;
1436 /* True if call to node can't result in a call to free, munmap or
1437 other operation that could make previously non-trapping memory
1438 accesses trapping. */
1439 unsigned nonfreeing_fn : 1;
1440 /* True if there was multiple COMDAT bodies merged by lto-symtab. */
1441 unsigned merged_comdat : 1;
1442 /* True if function was created to be executed in parallel. */
1443 unsigned parallelized_function : 1;
1444 /* True if function is part split out by ipa-split. */
1445 unsigned split_part : 1;
1446 /* True if the function appears as possible target of indirect call. */
1447 unsigned indirect_call_target : 1;
1448
1449 private:
1450 /* Worker for call_for_symbol_and_aliases. */
1451 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1452 void *),
1453 void *data, bool include_overwritable);
1454 };
1455
1456 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1457 can appear in multiple sets. */
1458 struct cgraph_node_set_def
1459 {
1460 hash_map<cgraph_node *, size_t> *map;
1461 vec<cgraph_node *> nodes;
1462 };
1463
1464 typedef cgraph_node_set_def *cgraph_node_set;
1465 typedef struct varpool_node_set_def *varpool_node_set;
1466
1467 class varpool_node;
1468
1469 /* A varpool node set is a collection of varpool nodes. A varpool node
1470 can appear in multiple sets. */
1471 struct varpool_node_set_def
1472 {
1473 hash_map<varpool_node *, size_t> * map;
1474 vec<varpool_node *> nodes;
1475 };
1476
1477 /* Iterator structure for cgraph node sets. */
1478 struct cgraph_node_set_iterator
1479 {
1480 cgraph_node_set set;
1481 unsigned index;
1482 };
1483
1484 /* Iterator structure for varpool node sets. */
1485 struct varpool_node_set_iterator
1486 {
1487 varpool_node_set set;
1488 unsigned index;
1489 };
1490
1491 /* Context of polymorphic call. It represent information about the type of
1492 instance that may reach the call. This is used by ipa-devirt walkers of the
1493 type inheritance graph. */
1494
1495 class GTY(()) ipa_polymorphic_call_context {
1496 public:
1497 /* The called object appears in an object of type OUTER_TYPE
1498 at offset OFFSET. When information is not 100% reliable, we
1499 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1500 HOST_WIDE_INT offset;
1501 HOST_WIDE_INT speculative_offset;
1502 tree outer_type;
1503 tree speculative_outer_type;
1504 /* True if outer object may be in construction or destruction. */
1505 unsigned maybe_in_construction : 1;
1506 /* True if outer object may be of derived type. */
1507 unsigned maybe_derived_type : 1;
1508 /* True if speculative outer object may be of derived type. We always
1509 speculate that construction does not happen. */
1510 unsigned speculative_maybe_derived_type : 1;
1511 /* True if the context is invalid and all calls should be redirected
1512 to BUILTIN_UNREACHABLE. */
1513 unsigned invalid : 1;
1514 /* True if the outer type is dynamic. */
1515 unsigned dynamic : 1;
1516
1517 /* Build empty "I know nothing" context. */
1518 ipa_polymorphic_call_context ();
1519 /* Build polymorphic call context for indirect call E. */
1520 ipa_polymorphic_call_context (cgraph_edge *e);
1521 /* Build polymorphic call context for IP invariant CST.
1522 If specified, OTR_TYPE specify the type of polymorphic call
1523 that takes CST+OFFSET as a prameter. */
1524 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1525 HOST_WIDE_INT offset = 0);
1526 /* Build context for pointer REF contained in FNDECL at statement STMT.
1527 if INSTANCE is non-NULL, return pointer to the object described by
1528 the context. */
1529 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1530 tree *instance = NULL);
1531
1532 /* Look for vtable stores or constructor calls to work out dynamic type
1533 of memory location. */
1534 bool get_dynamic_type (tree, tree, tree, gimple *);
1535
1536 /* Make context non-speculative. */
1537 void clear_speculation ();
1538
1539 /* Produce context specifying all derrived types of OTR_TYPE. If OTR_TYPE is
1540 NULL, the context is set to dummy "I know nothing" setting. */
1541 void clear_outer_type (tree otr_type = NULL);
1542
1543 /* Walk container types and modify context to point to actual class
1544 containing OTR_TYPE (if non-NULL) as base class.
1545 Return true if resulting context is valid.
1546
1547 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1548 valid only via allocation of new polymorphic type inside by means
1549 of placement new.
1550
1551 When CONSIDER_BASES is false, only look for actual fields, not base types
1552 of TYPE. */
1553 bool restrict_to_inner_class (tree otr_type,
1554 bool consider_placement_new = true,
1555 bool consider_bases = true);
1556
1557 /* Adjust all offsets in contexts by given number of bits. */
1558 void offset_by (HOST_WIDE_INT);
1559 /* Use when we can not track dynamic type change. This speculatively assume
1560 type change is not happening. */
1561 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1562 /* Assume that both THIS and a given context is valid and strenghten THIS
1563 if possible. Return true if any strenghtening was made.
1564 If actual type the context is being used in is known, OTR_TYPE should be
1565 set accordingly. This improves quality of combined result. */
1566 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1567 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1568
1569 /* Return TRUE if context is fully useless. */
1570 bool useless_p () const;
1571 /* Return TRUE if this context conveys the same information as X. */
1572 bool equal_to (const ipa_polymorphic_call_context &x) const;
1573
1574 /* Dump human readable context to F. If NEWLINE is true, it will be
1575 terminated by a newline. */
1576 void dump (FILE *f, bool newline = true) const;
1577 void DEBUG_FUNCTION debug () const;
1578
1579 /* LTO streaming. */
1580 void stream_out (struct output_block *) const;
1581 void stream_in (struct lto_input_block *, struct data_in *data_in);
1582
1583 private:
1584 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1585 bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1586 void set_by_decl (tree, HOST_WIDE_INT);
1587 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1588 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1589 void make_speculative (tree otr_type = NULL);
1590 };
1591
1592 /* Structure containing additional information about an indirect call. */
1593
1594 struct GTY(()) cgraph_indirect_call_info
1595 {
1596 /* When agg_content is set, an offset where the call pointer is located
1597 within the aggregate. */
1598 HOST_WIDE_INT offset;
1599 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1600 ipa_polymorphic_call_context context;
1601 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1602 HOST_WIDE_INT otr_token;
1603 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1604 tree otr_type;
1605 /* Index of the parameter that is called. */
1606 int param_index;
1607 /* ECF flags determined from the caller. */
1608 int ecf_flags;
1609 /* Profile_id of common target obtrained from profile. */
1610 int common_target_id;
1611 /* Probability that call will land in function with COMMON_TARGET_ID. */
1612 int common_target_probability;
1613
1614 /* Set when the call is a virtual call with the parameter being the
1615 associated object pointer rather than a simple direct call. */
1616 unsigned polymorphic : 1;
1617 /* Set when the call is a call of a pointer loaded from contents of an
1618 aggregate at offset. */
1619 unsigned agg_contents : 1;
1620 /* Set when this is a call through a member pointer. */
1621 unsigned member_ptr : 1;
1622 /* When the agg_contents bit is set, this one determines whether the
1623 destination is loaded from a parameter passed by reference. */
1624 unsigned by_ref : 1;
1625 /* When the agg_contents bit is set, this one determines whether we can
1626 deduce from the function body that the loaded value from the reference is
1627 never modified between the invocation of the function and the load
1628 point. */
1629 unsigned guaranteed_unmodified : 1;
1630 /* For polymorphic calls this specify whether the virtual table pointer
1631 may have changed in between function entry and the call. */
1632 unsigned vptr_changed : 1;
1633 };
1634
1635 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1636 for_user)) cgraph_edge {
1637 friend class cgraph_node;
1638
1639 /* Remove the edge in the cgraph. */
1640 void remove (void);
1641
1642 /* Change field call_stmt of edge to NEW_STMT.
1643 If UPDATE_SPECULATIVE and E is any component of speculative
1644 edge, then update all components. */
1645 void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1646
1647 /* Redirect callee of the edge to N. The function does not update underlying
1648 call expression. */
1649 void redirect_callee (cgraph_node *n);
1650
1651 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1652 create one or more equivalent thunks for N and redirect E to the first in
1653 the chain. Note that it is then necessary to call
1654 n->expand_all_artificial_thunks once all callers are redirected. */
1655 void redirect_callee_duplicating_thunks (cgraph_node *n);
1656
1657 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1658 CALLEE. DELTA is an integer constant that is to be added to the this
1659 pointer (first parameter) to compensate for skipping
1660 a thunk adjustment. */
1661 cgraph_edge *make_direct (cgraph_node *callee);
1662
1663 /* Turn edge into speculative call calling N2. Update
1664 the profile so the direct call is taken COUNT times
1665 with FREQUENCY. */
1666 cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);
1667
1668 /* Given speculative call edge, return all three components. */
1669 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1670 ipa_ref *&reference);
1671
1672 /* Speculative call edge turned out to be direct call to CALLE_DECL.
1673 Remove the speculative call sequence and return edge representing the call.
1674 It is up to caller to redirect the call as appropriate. */
1675 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1676
1677 /* If necessary, change the function declaration in the call statement
1678 associated with the edge so that it corresponds to the edge callee. */
1679 gimple *redirect_call_stmt_to_callee (void);
1680
1681 /* Create clone of edge in the node N represented
1682 by CALL_EXPR the callgraph. */
1683 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1684 profile_count num, profile_count den,
1685 bool update_original);
1686
1687 /* Verify edge count and frequency. */
1688 bool verify_count ();
1689
1690 /* Return true when call of edge can not lead to return from caller
1691 and thus it is safe to ignore its side effects for IPA analysis
1692 when computing side effects of the caller. */
1693 bool cannot_lead_to_return_p (void);
1694
1695 /* Return true when the edge represents a direct recursion. */
1696 bool recursive_p (void);
1697
1698 /* Return true if the call can be hot. */
1699 bool maybe_hot_p (void);
1700
1701 /* Rebuild cgraph edges for current function node. This needs to be run after
1702 passes that don't update the cgraph. */
1703 static unsigned int rebuild_edges (void);
1704
1705 /* Rebuild cgraph references for current function node. This needs to be run
1706 after passes that don't update the cgraph. */
1707 static void rebuild_references (void);
1708
1709 /* Expected number of executions: calculated in profile.c. */
1710 profile_count count;
1711 cgraph_node *caller;
1712 cgraph_node *callee;
1713 cgraph_edge *prev_caller;
1714 cgraph_edge *next_caller;
1715 cgraph_edge *prev_callee;
1716 cgraph_edge *next_callee;
1717 gcall *call_stmt;
1718 /* Additional information about an indirect call. Not cleared when an edge
1719 becomes direct. */
1720 cgraph_indirect_call_info *indirect_info;
1721 PTR GTY ((skip (""))) aux;
1722 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1723 explanation why function was not inlined. */
1724 enum cgraph_inline_failed_t inline_failed;
1725 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1726 when the function is serialized in. */
1727 unsigned int lto_stmt_uid;
1728 /* Unique id of the edge. */
1729 int uid;
1730 /* Whether this edge was made direct by indirect inlining. */
1731 unsigned int indirect_inlining_edge : 1;
1732 /* Whether this edge describes an indirect call with an undetermined
1733 callee. */
1734 unsigned int indirect_unknown_callee : 1;
1735 /* Whether this edge is still a dangling */
1736 /* True if the corresponding CALL stmt cannot be inlined. */
1737 unsigned int call_stmt_cannot_inline_p : 1;
1738 /* Can this call throw externally? */
1739 unsigned int can_throw_external : 1;
1740 /* Edges with SPECULATIVE flag represents indirect calls that was
1741 speculatively turned into direct (i.e. by profile feedback).
1742 The final code sequence will have form:
1743
1744 if (call_target == expected_fn)
1745 expected_fn ();
1746 else
1747 call_target ();
1748
1749 Every speculative call is represented by three components attached
1750 to a same call statement:
1751 1) a direct call (to expected_fn)
1752 2) an indirect call (to call_target)
1753 3) a IPA_REF_ADDR refrence to expected_fn.
1754
1755 Optimizers may later redirect direct call to clone, so 1) and 3)
1756 do not need to necesarily agree with destination. */
1757 unsigned int speculative : 1;
1758 /* Set to true when caller is a constructor or destructor of polymorphic
1759 type. */
1760 unsigned in_polymorphic_cdtor : 1;
1761
1762 /* Return true if call must bind to current definition. */
1763 bool binds_to_current_def_p ();
1764
1765 /* Expected frequency of executions within the function.
1766 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1767 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1768 int frequency ();
1769
1770 /* Expected frequency of executions within the function. */
1771 sreal sreal_frequency ();
1772 private:
1773 /* Remove the edge from the list of the callers of the callee. */
1774 void remove_caller (void);
1775
1776 /* Remove the edge from the list of the callees of the caller. */
1777 void remove_callee (void);
1778
1779 /* Set callee N of call graph edge and add it to the corresponding set of
1780 callers. */
1781 void set_callee (cgraph_node *n);
1782
1783 /* Output flags of edge to a file F. */
1784 void dump_edge_flags (FILE *f);
1785
1786 /* Verify that call graph edge corresponds to DECL from the associated
1787 statement. Return true if the verification should fail. */
1788 bool verify_corresponds_to_fndecl (tree decl);
1789 };
1790
1791 #define CGRAPH_FREQ_BASE 1000
1792 #define CGRAPH_FREQ_MAX 100000
1793
1794 /* The varpool data structure.
1795 Each static variable decl has assigned varpool_node. */
1796
1797 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1798 public:
1799 /* Dump given varpool node to F. */
1800 void dump (FILE *f);
1801
1802 /* Dump given varpool node to stderr. */
1803 void DEBUG_FUNCTION debug (void);
1804
1805 /* Remove variable from symbol table. */
1806 void remove (void);
1807
1808 /* Remove node initializer when it is no longer needed. */
1809 void remove_initializer (void);
1810
1811 void analyze (void);
1812
1813 /* Return variable availability. */
1814 availability get_availability (symtab_node *ref = NULL);
1815
1816 /* When doing LTO, read variable's constructor from disk if
1817 it is not already present. */
1818 tree get_constructor (void);
1819
1820 /* Return true if variable has constructor that can be used for folding. */
1821 bool ctor_useable_for_folding_p (void);
1822
1823 /* For given variable pool node, walk the alias chain to return the function
1824 the variable is alias of. Do not walk through thunks.
1825 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1826 When REF is non-NULL, assume that reference happens in symbol REF
1827 when determining the availability. */
1828 inline varpool_node *ultimate_alias_target
1829 (availability *availability = NULL, symtab_node *ref = NULL);
1830
1831 /* Return node that alias is aliasing. */
1832 inline varpool_node *get_alias_target (void);
1833
1834 /* Output one variable, if necessary. Return whether we output it. */
1835 bool assemble_decl (void);
1836
1837 /* For variables in named sections make sure get_variable_section
1838 is called before we switch to those sections. Then section
1839 conflicts between read-only and read-only requiring relocations
1840 sections can be resolved. */
1841 void finalize_named_section_flags (void);
1842
1843 /* Call calback on varpool symbol and aliases associated to varpool symbol.
1844 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1845 skipped. */
1846 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1847 void *data,
1848 bool include_overwritable);
1849
1850 /* Return true when variable should be considered externally visible. */
1851 bool externally_visible_p (void);
1852
1853 /* Return true when all references to variable must be visible
1854 in ipa_ref_list.
1855 i.e. if the variable is not externally visible or not used in some magic
1856 way (asm statement or such).
1857 The magic uses are all summarized in force_output flag. */
1858 inline bool all_refs_explicit_p ();
1859
1860 /* Return true when variable can be removed from variable pool
1861 if all direct calls are eliminated. */
1862 inline bool can_remove_if_no_refs_p (void);
1863
1864 /* Add the variable DECL to the varpool.
1865 Unlike finalize_decl function is intended to be used
1866 by middle end and allows insertion of new variable at arbitrary point
1867 of compilation. */
1868 static void add (tree decl);
1869
1870 /* Return varpool node for given symbol and check it is a function. */
1871 static inline varpool_node *get (const_tree decl);
1872
1873 /* Mark DECL as finalized. By finalizing the declaration, frontend instruct
1874 the middle end to output the variable to asm file, if needed or externally
1875 visible. */
1876 static void finalize_decl (tree decl);
1877
1878 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1879 Extra name aliases are output whenever DECL is output. */
1880 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1881
1882 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1883 Extra name aliases are output whenever DECL is output. */
1884 static varpool_node * create_alias (tree, tree);
1885
1886 /* Dump the variable pool to F. */
1887 static void dump_varpool (FILE *f);
1888
1889 /* Dump the variable pool to stderr. */
1890 static void DEBUG_FUNCTION debug_varpool (void);
1891
1892 /* Allocate new callgraph node and insert it into basic data structures. */
1893 static varpool_node *create_empty (void);
1894
1895 /* Return varpool node assigned to DECL. Create new one when needed. */
1896 static varpool_node *get_create (tree decl);
1897
1898 /* Given an assembler name, lookup node. */
1899 static varpool_node *get_for_asmname (tree asmname);
1900
1901 /* Set when variable is scheduled to be assembled. */
1902 unsigned output : 1;
1903
1904 /* Set when variable has statically initialized pointer
1905 or is a static bounds variable and needs initalization. */
1906 unsigned need_bounds_init : 1;
1907
1908 /* Set if the variable is dynamically initialized, except for
1909 function local statics. */
1910 unsigned dynamically_initialized : 1;
1911
1912 ENUM_BITFIELD(tls_model) tls_model : 3;
1913
1914 /* Set if the variable is known to be used by single function only.
1915 This is computed by ipa_signle_use pass and used by late optimizations
1916 in places where optimization would be valid for local static variable
1917 if we did not do any inter-procedural code movement. */
1918 unsigned used_by_single_function : 1;
1919
1920 private:
1921 /* Assemble thunks and aliases associated to varpool node. */
1922 void assemble_aliases (void);
1923
1924 /* Worker for call_for_node_and_aliases. */
1925 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1926 void *data,
1927 bool include_overwritable);
1928 };
1929
1930 /* Every top level asm statement is put into a asm_node. */
1931
1932 struct GTY(()) asm_node {
1933
1934
1935 /* Next asm node. */
1936 asm_node *next;
1937 /* String for this asm node. */
1938 tree asm_str;
1939 /* Ordering of all cgraph nodes. */
1940 int order;
1941 };
1942
1943 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1944
1945 template <>
1946 template <>
1947 inline bool
1948 is_a_helper <cgraph_node *>::test (symtab_node *p)
1949 {
1950 return p && p->type == SYMTAB_FUNCTION;
1951 }
1952
1953 /* Report whether or not THIS symtab node is a vriable, aka varpool_node. */
1954
1955 template <>
1956 template <>
1957 inline bool
1958 is_a_helper <varpool_node *>::test (symtab_node *p)
1959 {
1960 return p && p->type == SYMTAB_VARIABLE;
1961 }
1962
1963 /* Macros to access the next item in the list of free cgraph nodes and
1964 edges. */
1965 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1966 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1967 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1968
1969 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1970 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1971 typedef void (*varpool_node_hook)(varpool_node *, void *);
1972 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1973 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1974
1975 struct cgraph_edge_hook_list;
1976 struct cgraph_node_hook_list;
1977 struct varpool_node_hook_list;
1978 struct cgraph_2edge_hook_list;
1979 struct cgraph_2node_hook_list;
1980
1981 /* Map from a symbol to initialization/finalization priorities. */
1982 struct GTY(()) symbol_priority_map {
1983 priority_type init;
1984 priority_type fini;
1985 };
1986
1987 enum symtab_state
1988 {
1989 /* Frontend is parsing and finalizing functions. */
1990 PARSING,
1991 /* Callgraph is being constructed. It is safe to add new functions. */
1992 CONSTRUCTION,
1993 /* Callgraph is being streamed-in at LTO time. */
1994 LTO_STREAMING,
1995 /* Callgraph is built and early IPA passes are being run. */
1996 IPA,
1997 /* Callgraph is built and all functions are transformed to SSA form. */
1998 IPA_SSA,
1999 /* All inline decisions are done; it is now possible to remove extern inline
2000 functions and virtual call targets. */
2001 IPA_SSA_AFTER_INLINING,
2002 /* Functions are now ordered and being passed to RTL expanders. */
2003 EXPANSION,
2004 /* All cgraph expansion is done. */
2005 FINISHED
2006 };
2007
2008 struct asmname_hasher : ggc_ptr_hash <symtab_node>
2009 {
2010 typedef const_tree compare_type;
2011
2012 static hashval_t hash (symtab_node *n);
2013 static bool equal (symtab_node *n, const_tree t);
2014 };
2015
2016 class GTY((tag ("SYMTAB"))) symbol_table
2017 {
2018 public:
2019 friend class symtab_node;
2020 friend class cgraph_node;
2021 friend class cgraph_edge;
2022
2023 symbol_table (): cgraph_max_summary_uid (1)
2024 {
2025 }
2026
2027 /* Initialize callgraph dump file. */
2028 void initialize (void);
2029
2030 /* Register a top-level asm statement ASM_STR. */
2031 inline asm_node *finalize_toplevel_asm (tree asm_str);
2032
2033 /* Analyze the whole compilation unit once it is parsed completely. */
2034 void finalize_compilation_unit (void);
2035
2036 /* C++ frontend produce same body aliases all over the place, even before PCH
2037 gets streamed out. It relies on us linking the aliases with their function
2038 in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
2039 first produce aliases without links, but once C++ FE is sure he won't sream
2040 PCH we build the links via this function. */
2041 void process_same_body_aliases (void);
2042
2043 /* Perform simple optimizations based on callgraph. */
2044 void compile (void);
2045
2046 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
2047 functions into callgraph in a way so they look like ordinary reachable
2048 functions inserted into callgraph already at construction time. */
2049 void process_new_functions (void);
2050
2051 /* Once all functions from compilation unit are in memory, produce all clones
2052 and update all calls. We might also do this on demand if we don't want to
2053 bring all functions to memory prior compilation, but current WHOPR
2054 implementation does that and it is bit easier to keep everything right
2055 in this order. */
2056 void materialize_all_clones (void);
2057
2058 /* Register a symbol NODE. */
2059 inline void register_symbol (symtab_node *node);
2060
2061 inline void
2062 clear_asm_symbols (void)
2063 {
2064 asmnodes = NULL;
2065 asm_last_node = NULL;
2066 }
2067
2068 /* Perform reachability analysis and reclaim all unreachable nodes. */
2069 bool remove_unreachable_nodes (FILE *file);
2070
2071 /* Optimization of function bodies might've rendered some variables as
2072 unnecessary so we want to avoid these from being compiled. Re-do
2073 reachability starting from variables that are either externally visible
2074 or was referred from the asm output routines. */
2075 void remove_unreferenced_decls (void);
2076
2077 /* Unregister a symbol NODE. */
2078 inline void unregister (symtab_node *node);
2079
2080 /* Allocate new callgraph node and insert it into basic data structures. */
2081 cgraph_node *create_empty (void);
2082
2083 /* Release a callgraph NODE with UID and put in to the list
2084 of free nodes. */
2085 void release_symbol (cgraph_node *node, int uid);
2086
2087 /* Output all variables enqueued to be assembled. */
2088 bool output_variables (void);
2089
2090 /* Weakrefs may be associated to external decls and thus not output
2091 at expansion time. Emit all necessary aliases. */
2092 void output_weakrefs (void);
2093
2094 /* Return first static symbol with definition. */
2095 inline symtab_node *first_symbol (void);
2096
2097 /* Return first assembler symbol. */
2098 inline asm_node *
2099 first_asm_symbol (void)
2100 {
2101 return asmnodes;
2102 }
2103
2104 /* Return first static symbol with definition. */
2105 inline symtab_node *first_defined_symbol (void);
2106
2107 /* Return first variable. */
2108 inline varpool_node *first_variable (void);
2109
2110 /* Return next variable after NODE. */
2111 inline varpool_node *next_variable (varpool_node *node);
2112
2113 /* Return first static variable with initializer. */
2114 inline varpool_node *first_static_initializer (void);
2115
2116 /* Return next static variable with initializer after NODE. */
2117 inline varpool_node *next_static_initializer (varpool_node *node);
2118
2119 /* Return first static variable with definition. */
2120 inline varpool_node *first_defined_variable (void);
2121
2122 /* Return next static variable with definition after NODE. */
2123 inline varpool_node *next_defined_variable (varpool_node *node);
2124
2125 /* Return first function with body defined. */
2126 inline cgraph_node *first_defined_function (void);
2127
2128 /* Return next function with body defined after NODE. */
2129 inline cgraph_node *next_defined_function (cgraph_node *node);
2130
2131 /* Return first function. */
2132 inline cgraph_node *first_function (void);
2133
2134 /* Return next function. */
2135 inline cgraph_node *next_function (cgraph_node *node);
2136
2137 /* Return first function with body defined. */
2138 cgraph_node *first_function_with_gimple_body (void);
2139
2140 /* Return next reachable static variable with initializer after NODE. */
2141 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2142
2143 /* Register HOOK to be called with DATA on each removed edge. */
2144 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2145 void *data);
2146
2147 /* Remove ENTRY from the list of hooks called on removing edges. */
2148 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2149
2150 /* Register HOOK to be called with DATA on each removed node. */
2151 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2152 void *data);
2153
2154 /* Remove ENTRY from the list of hooks called on removing nodes. */
2155 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2156
2157 /* Register HOOK to be called with DATA on each removed node. */
2158 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2159 void *data);
2160
2161 /* Remove ENTRY from the list of hooks called on removing nodes. */
2162 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2163
2164 /* Register HOOK to be called with DATA on each inserted node. */
2165 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2166 void *data);
2167
2168 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2169 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2170
2171 /* Register HOOK to be called with DATA on each inserted node. */
2172 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2173 void *data);
2174
2175 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2176 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2177
2178 /* Register HOOK to be called with DATA on each duplicated edge. */
2179 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2180 void *data);
2181 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2182 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2183
2184 /* Register HOOK to be called with DATA on each duplicated node. */
2185 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2186 void *data);
2187
2188 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2189 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2190
2191 /* Call all edge removal hooks. */
2192 void call_edge_removal_hooks (cgraph_edge *e);
2193
2194 /* Call all node insertion hooks. */
2195 void call_cgraph_insertion_hooks (cgraph_node *node);
2196
2197 /* Call all node removal hooks. */
2198 void call_cgraph_removal_hooks (cgraph_node *node);
2199
2200 /* Call all node duplication hooks. */
2201 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2202
2203 /* Call all edge duplication hooks. */
2204 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2205
2206 /* Call all node removal hooks. */
2207 void call_varpool_removal_hooks (varpool_node *node);
2208
2209 /* Call all node insertion hooks. */
2210 void call_varpool_insertion_hooks (varpool_node *node);
2211
2212 /* Arrange node to be first in its entry of assembler_name_hash. */
2213 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2214
2215 /* Initalize asm name hash unless. */
2216 void symtab_initialize_asm_name_hash (void);
2217
2218 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2219 void change_decl_assembler_name (tree decl, tree name);
2220
2221 /* Dump symbol table to F. */
2222 void dump (FILE *f);
2223
2224 /* Dump symbol table to stderr. */
2225 inline DEBUG_FUNCTION void debug (void)
2226 {
2227 dump (stderr);
2228 }
2229
2230 /* Return true if assembler names NAME1 and NAME2 leads to the same symbol
2231 name. */
2232 static bool assembler_names_equal_p (const char *name1, const char *name2);
2233
2234 int cgraph_count;
2235 int cgraph_max_uid;
2236 int cgraph_max_summary_uid;
2237
2238 int edges_count;
2239 int edges_max_uid;
2240
2241 symtab_node* GTY(()) nodes;
2242 asm_node* GTY(()) asmnodes;
2243 asm_node* GTY(()) asm_last_node;
2244 cgraph_node* GTY(()) free_nodes;
2245
2246 /* Head of a linked list of unused (freed) call graph edges.
2247 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
2248 cgraph_edge * GTY(()) free_edges;
2249
2250 /* The order index of the next symtab node to be created. This is
2251 used so that we can sort the cgraph nodes in order by when we saw
2252 them, to support -fno-toplevel-reorder. */
2253 int order;
2254
2255 /* Set when whole unit has been analyzed so we can access global info. */
2256 bool global_info_ready;
2257 /* What state callgraph is in right now. */
2258 enum symtab_state state;
2259 /* Set when the cgraph is fully build and the basic flags are computed. */
2260 bool function_flags_ready;
2261
2262 bool cpp_implicit_aliases_done;
2263
2264 /* Hash table used to hold sectoons. */
2265 hash_table<section_name_hasher> *GTY(()) section_hash;
2266
2267 /* Hash table used to convert assembler names into nodes. */
2268 hash_table<asmname_hasher> *assembler_name_hash;
2269
2270 /* Hash table used to hold init priorities. */
2271 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2272
2273 FILE* GTY ((skip)) dump_file;
2274
2275 /* Return symbol used to separate symbol name from suffix. */
2276 static char symbol_suffix_separator ();
2277
2278 FILE* GTY ((skip)) ipa_clones_dump_file;
2279
2280 hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
2281
2282 private:
2283 /* Allocate new callgraph node. */
2284 inline cgraph_node * allocate_cgraph_symbol (void);
2285
2286 /* Allocate a cgraph_edge structure and fill it with data according to the
2287 parameters of which only CALLEE can be NULL (when creating an indirect call
2288 edge). */
2289 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2290 gcall *call_stmt, profile_count count,
2291 bool indir_unknown_callee);
2292
2293 /* Put the edge onto the free list. */
2294 void free_edge (cgraph_edge *e);
2295
2296 /* Insert NODE to assembler name hash. */
2297 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2298
2299 /* Remove NODE from assembler name hash. */
2300 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2301
2302 /* Hash asmnames ignoring the user specified marks. */
2303 static hashval_t decl_assembler_name_hash (const_tree asmname);
2304
2305 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2306 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2307
2308 friend struct asmname_hasher;
2309
2310 /* List of hooks triggered when an edge is removed. */
2311 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2312 /* List of hooks triggem_red when a cgraph node is removed. */
2313 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2314 /* List of hooks triggered when an edge is duplicated. */
2315 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2316 /* List of hooks triggered when a node is duplicated. */
2317 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2318 /* List of hooks triggered when an function is inserted. */
2319 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2320 /* List of hooks triggered when an variable is inserted. */
2321 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2322 /* List of hooks triggered when a node is removed. */
2323 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2324 };
2325
2326 extern GTY(()) symbol_table *symtab;
2327
2328 extern vec<cgraph_node *> cgraph_new_nodes;
2329
2330 inline hashval_t
2331 asmname_hasher::hash (symtab_node *n)
2332 {
2333 return symbol_table::decl_assembler_name_hash
2334 (DECL_ASSEMBLER_NAME (n->decl));
2335 }
2336
2337 inline bool
2338 asmname_hasher::equal (symtab_node *n, const_tree t)
2339 {
2340 return symbol_table::decl_assembler_name_equal (n->decl, t);
2341 }
2342
2343 /* In cgraph.c */
2344 void cgraph_c_finalize (void);
2345 void release_function_body (tree);
2346 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2347
2348 void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2349 bool cgraph_function_possibly_inlined_p (tree);
2350
2351 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2352 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2353
2354 extern bool gimple_check_call_matching_types (gimple *, tree, bool);
2355
2356 /* In cgraphunit.c */
2357 void cgraphunit_c_finalize (void);
2358
2359 /* Initialize datastructures so DECL is a function in lowered gimple form.
2360 IN_SSA is true if the gimple is in SSA. */
2361 basic_block init_lowered_empty_function (tree, bool, profile_count);
2362
2363 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
2364 /* In cgraphclones.c */
2365
2366 tree clone_function_name_1 (const char *, const char *);
2367 tree clone_function_name (tree decl, const char *);
2368
2369 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2370 bool, bitmap, bool, bitmap, basic_block);
2371
2372 void dump_callgraph_transformation (const cgraph_node *original,
2373 const cgraph_node *clone,
2374 const char *suffix);
2375 tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
2376 bool skip_return);
2377
2378 /* In cgraphbuild.c */
2379 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2380 void record_references_in_initializer (tree, bool);
2381
2382 /* In ipa.c */
2383 void cgraph_build_static_cdtor (char which, tree body, int priority);
2384 bool ipa_discover_readonly_nonaddressable_vars (void);
2385
2386 /* In varpool.c */
2387 tree ctor_for_folding (tree);
2388
2389 /* In tree-chkp.c */
2390 extern bool chkp_function_instrumented_p (tree fndecl);
2391
2392 /* In ipa-inline-analysis.c */
2393 void initialize_inline_failed (struct cgraph_edge *);
2394 bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
2395
2396 /* Return true when the symbol is real symbol, i.e. it is not inline clone
2397 or abstract function kept for debug info purposes only. */
2398 inline bool
2399 symtab_node::real_symbol_p (void)
2400 {
2401 cgraph_node *cnode;
2402
2403 if (DECL_ABSTRACT_P (decl))
2404 return false;
2405 if (transparent_alias && definition)
2406 return false;
2407 if (!is_a <cgraph_node *> (this))
2408 return true;
2409 cnode = dyn_cast <cgraph_node *> (this);
2410 if (cnode->global.inlined_to)
2411 return false;
2412 return true;
2413 }
2414
2415 /* Return true if DECL should have entry in symbol table if used.
2416 Those are functions and static & external veriables*/
2417
2418 static inline bool
2419 decl_in_symtab_p (const_tree decl)
2420 {
2421 return (TREE_CODE (decl) == FUNCTION_DECL
2422 || (TREE_CODE (decl) == VAR_DECL
2423 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2424 }
2425
2426 inline bool
2427 symtab_node::in_same_comdat_group_p (symtab_node *target)
2428 {
2429 symtab_node *source = this;
2430
2431 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2432 {
2433 if (cn->global.inlined_to)
2434 source = cn->global.inlined_to;
2435 }
2436 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2437 {
2438 if (cn->global.inlined_to)
2439 target = cn->global.inlined_to;
2440 }
2441
2442 return source->get_comdat_group () == target->get_comdat_group ();
2443 }
2444
2445 /* Return node that alias is aliasing. */
2446
2447 inline symtab_node *
2448 symtab_node::get_alias_target (void)
2449 {
2450 ipa_ref *ref = NULL;
2451 iterate_reference (0, ref);
2452 if (ref->use == IPA_REF_CHKP)
2453 iterate_reference (1, ref);
2454 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2455 return ref->referred;
2456 }
2457
2458 /* Return next reachable static symbol with initializer after the node. */
2459
2460 inline symtab_node *
2461 symtab_node::next_defined_symbol (void)
2462 {
2463 symtab_node *node1 = next;
2464
2465 for (; node1; node1 = node1->next)
2466 if (node1->definition)
2467 return node1;
2468
2469 return NULL;
2470 }
2471
2472 /* Iterates I-th reference in the list, REF is also set. */
2473
2474 inline ipa_ref *
2475 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2476 {
2477 vec_safe_iterate (ref_list.references, i, &ref);
2478
2479 return ref;
2480 }
2481
2482 /* Iterates I-th referring item in the list, REF is also set. */
2483
2484 inline ipa_ref *
2485 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2486 {
2487 ref_list.referring.iterate (i, &ref);
2488
2489 return ref;
2490 }
2491
2492 /* Iterates I-th referring alias item in the list, REF is also set. */
2493
2494 inline ipa_ref *
2495 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2496 {
2497 ref_list.referring.iterate (i, &ref);
2498
2499 if (ref && ref->use != IPA_REF_ALIAS)
2500 return NULL;
2501
2502 return ref;
2503 }
2504
2505 /* Return true if list contains an alias. */
2506
2507 inline bool
2508 symtab_node::has_aliases_p (void)
2509 {
2510 ipa_ref *ref = NULL;
2511
2512 return (iterate_direct_aliases (0, ref) != NULL);
2513 }
2514
2515 /* Return true when RESOLUTION indicate that linker will use
2516 the symbol from non-LTO object files. */
2517
2518 inline bool
2519 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2520 {
2521 return (resolution == LDPR_PREVAILING_DEF
2522 || resolution == LDPR_PREEMPTED_REG
2523 || resolution == LDPR_RESOLVED_EXEC
2524 || resolution == LDPR_RESOLVED_DYN);
2525 }
2526
2527 /* Return true when symtab_node is known to be used from other (non-LTO)
2528 object file. Known only when doing LTO via linker plugin. */
2529
2530 inline bool
2531 symtab_node::used_from_object_file_p (void)
2532 {
2533 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2534 return false;
2535 if (resolution_used_from_other_file_p (resolution))
2536 return true;
2537 return false;
2538 }
2539
2540 /* Return varpool node for given symbol and check it is a function. */
2541
2542 inline varpool_node *
2543 varpool_node::get (const_tree decl)
2544 {
2545 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2546 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2547 }
2548
2549 /* Register a symbol NODE. */
2550
2551 inline void
2552 symbol_table::register_symbol (symtab_node *node)
2553 {
2554 node->next = nodes;
2555 node->previous = NULL;
2556
2557 if (nodes)
2558 nodes->previous = node;
2559 nodes = node;
2560
2561 node->order = order++;
2562 }
2563
2564 /* Register a top-level asm statement ASM_STR. */
2565
2566 asm_node *
2567 symbol_table::finalize_toplevel_asm (tree asm_str)
2568 {
2569 asm_node *node;
2570
2571 node = ggc_cleared_alloc<asm_node> ();
2572 node->asm_str = asm_str;
2573 node->order = order++;
2574 node->next = NULL;
2575
2576 if (asmnodes == NULL)
2577 asmnodes = node;
2578 else
2579 asm_last_node->next = node;
2580
2581 asm_last_node = node;
2582 return node;
2583 }
2584
2585 /* Unregister a symbol NODE. */
2586 inline void
2587 symbol_table::unregister (symtab_node *node)
2588 {
2589 if (node->previous)
2590 node->previous->next = node->next;
2591 else
2592 nodes = node->next;
2593
2594 if (node->next)
2595 node->next->previous = node->previous;
2596
2597 node->next = NULL;
2598 node->previous = NULL;
2599 }
2600
2601 /* Release a callgraph NODE with UID and put in to the list of free nodes. */
2602
2603 inline void
2604 symbol_table::release_symbol (cgraph_node *node, int uid)
2605 {
2606 cgraph_count--;
2607
2608 /* Clear out the node to NULL all pointers and add the node to the free
2609 list. */
2610 memset (node, 0, sizeof (*node));
2611 node->type = SYMTAB_FUNCTION;
2612 node->uid = uid;
2613 SET_NEXT_FREE_NODE (node, free_nodes);
2614 free_nodes = node;
2615 }
2616
2617 /* Allocate new callgraph node. */
2618
2619 inline cgraph_node *
2620 symbol_table::allocate_cgraph_symbol (void)
2621 {
2622 cgraph_node *node;
2623
2624 if (free_nodes)
2625 {
2626 node = free_nodes;
2627 free_nodes = NEXT_FREE_NODE (node);
2628 }
2629 else
2630 {
2631 node = ggc_cleared_alloc<cgraph_node> ();
2632 node->uid = cgraph_max_uid++;
2633 }
2634
2635 node->summary_uid = cgraph_max_summary_uid++;
2636 return node;
2637 }
2638
2639
2640 /* Return first static symbol with definition. */
2641 inline symtab_node *
2642 symbol_table::first_symbol (void)
2643 {
2644 return nodes;
2645 }
2646
2647 /* Walk all symbols. */
2648 #define FOR_EACH_SYMBOL(node) \
2649 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2650
2651 /* Return first static symbol with definition. */
2652 inline symtab_node *
2653 symbol_table::first_defined_symbol (void)
2654 {
2655 symtab_node *node;
2656
2657 for (node = nodes; node; node = node->next)
2658 if (node->definition)
2659 return node;
2660
2661 return NULL;
2662 }
2663
2664 /* Walk all symbols with definitions in current unit. */
2665 #define FOR_EACH_DEFINED_SYMBOL(node) \
2666 for ((node) = symtab->first_defined_symbol (); (node); \
2667 (node) = node->next_defined_symbol ())
2668
2669 /* Return first variable. */
2670 inline varpool_node *
2671 symbol_table::first_variable (void)
2672 {
2673 symtab_node *node;
2674 for (node = nodes; node; node = node->next)
2675 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2676 return vnode;
2677 return NULL;
2678 }
2679
2680 /* Return next variable after NODE. */
2681 inline varpool_node *
2682 symbol_table::next_variable (varpool_node *node)
2683 {
2684 symtab_node *node1 = node->next;
2685 for (; node1; node1 = node1->next)
2686 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2687 return vnode1;
2688 return NULL;
2689 }
2690 /* Walk all variables. */
2691 #define FOR_EACH_VARIABLE(node) \
2692 for ((node) = symtab->first_variable (); \
2693 (node); \
2694 (node) = symtab->next_variable ((node)))
2695
2696 /* Return first static variable with initializer. */
2697 inline varpool_node *
2698 symbol_table::first_static_initializer (void)
2699 {
2700 symtab_node *node;
2701 for (node = nodes; node; node = node->next)
2702 {
2703 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2704 if (vnode && DECL_INITIAL (node->decl))
2705 return vnode;
2706 }
2707 return NULL;
2708 }
2709
2710 /* Return next static variable with initializer after NODE. */
2711 inline varpool_node *
2712 symbol_table::next_static_initializer (varpool_node *node)
2713 {
2714 symtab_node *node1 = node->next;
2715 for (; node1; node1 = node1->next)
2716 {
2717 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2718 if (vnode1 && DECL_INITIAL (node1->decl))
2719 return vnode1;
2720 }
2721 return NULL;
2722 }
2723
2724 /* Walk all static variables with initializer set. */
2725 #define FOR_EACH_STATIC_INITIALIZER(node) \
2726 for ((node) = symtab->first_static_initializer (); (node); \
2727 (node) = symtab->next_static_initializer (node))
2728
2729 /* Return first static variable with definition. */
2730 inline varpool_node *
2731 symbol_table::first_defined_variable (void)
2732 {
2733 symtab_node *node;
2734 for (node = nodes; node; node = node->next)
2735 {
2736 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2737 if (vnode && vnode->definition)
2738 return vnode;
2739 }
2740 return NULL;
2741 }
2742
2743 /* Return next static variable with definition after NODE. */
2744 inline varpool_node *
2745 symbol_table::next_defined_variable (varpool_node *node)
2746 {
2747 symtab_node *node1 = node->next;
2748 for (; node1; node1 = node1->next)
2749 {
2750 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2751 if (vnode1 && vnode1->definition)
2752 return vnode1;
2753 }
2754 return NULL;
2755 }
2756 /* Walk all variables with definitions in current unit. */
2757 #define FOR_EACH_DEFINED_VARIABLE(node) \
2758 for ((node) = symtab->first_defined_variable (); (node); \
2759 (node) = symtab->next_defined_variable (node))
2760
2761 /* Return first function with body defined. */
2762 inline cgraph_node *
2763 symbol_table::first_defined_function (void)
2764 {
2765 symtab_node *node;
2766 for (node = nodes; node; node = node->next)
2767 {
2768 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2769 if (cn && cn->definition)
2770 return cn;
2771 }
2772 return NULL;
2773 }
2774
2775 /* Return next function with body defined after NODE. */
2776 inline cgraph_node *
2777 symbol_table::next_defined_function (cgraph_node *node)
2778 {
2779 symtab_node *node1 = node->next;
2780 for (; node1; node1 = node1->next)
2781 {
2782 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2783 if (cn1 && cn1->definition)
2784 return cn1;
2785 }
2786 return NULL;
2787 }
2788
2789 /* Walk all functions with body defined. */
2790 #define FOR_EACH_DEFINED_FUNCTION(node) \
2791 for ((node) = symtab->first_defined_function (); (node); \
2792 (node) = symtab->next_defined_function ((node)))
2793
2794 /* Return first function. */
2795 inline cgraph_node *
2796 symbol_table::first_function (void)
2797 {
2798 symtab_node *node;
2799 for (node = nodes; node; node = node->next)
2800 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2801 return cn;
2802 return NULL;
2803 }
2804
2805 /* Return next function. */
2806 inline cgraph_node *
2807 symbol_table::next_function (cgraph_node *node)
2808 {
2809 symtab_node *node1 = node->next;
2810 for (; node1; node1 = node1->next)
2811 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2812 return cn1;
2813 return NULL;
2814 }
2815
2816 /* Return first function with body defined. */
2817 inline cgraph_node *
2818 symbol_table::first_function_with_gimple_body (void)
2819 {
2820 symtab_node *node;
2821 for (node = nodes; node; node = node->next)
2822 {
2823 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2824 if (cn && cn->has_gimple_body_p ())
2825 return cn;
2826 }
2827 return NULL;
2828 }
2829
2830 /* Return next reachable static variable with initializer after NODE. */
2831 inline cgraph_node *
2832 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2833 {
2834 symtab_node *node1 = node->next;
2835 for (; node1; node1 = node1->next)
2836 {
2837 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2838 if (cn1 && cn1->has_gimple_body_p ())
2839 return cn1;
2840 }
2841 return NULL;
2842 }
2843
2844 /* Walk all functions. */
2845 #define FOR_EACH_FUNCTION(node) \
2846 for ((node) = symtab->first_function (); (node); \
2847 (node) = symtab->next_function ((node)))
2848
2849 /* Return true when callgraph node is a function with Gimple body defined
2850 in current unit. Functions can also be define externally or they
2851 can be thunks with no Gimple representation.
2852
2853 Note that at WPA stage, the function body may not be present in memory. */
2854
2855 inline bool
2856 cgraph_node::has_gimple_body_p (void)
2857 {
2858 return definition && !thunk.thunk_p && !alias;
2859 }
2860
2861 /* Walk all functions with body defined. */
2862 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2863 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2864 (node) = symtab->next_function_with_gimple_body (node))
2865
2866 /* Uniquize all constants that appear in memory.
2867 Each constant in memory thus far output is recorded
2868 in `const_desc_table'. */
2869
2870 struct GTY((for_user)) constant_descriptor_tree {
2871 /* A MEM for the constant. */
2872 rtx rtl;
2873
2874 /* The value of the constant. */
2875 tree value;
2876
2877 /* Hash of value. Computing the hash from value each time
2878 hashfn is called can't work properly, as that means recursive
2879 use of the hash table during hash table expansion. */
2880 hashval_t hash;
2881 };
2882
2883 /* Return true when function is only called directly or it has alias.
2884 i.e. it is not externally visible, address was not taken and
2885 it is not used in any other non-standard way. */
2886
2887 inline bool
2888 cgraph_node::only_called_directly_or_aliased_p (void)
2889 {
2890 gcc_assert (!global.inlined_to);
2891 return (!force_output && !address_taken
2892 && !used_from_other_partition
2893 && !DECL_VIRTUAL_P (decl)
2894 && !DECL_STATIC_CONSTRUCTOR (decl)
2895 && !DECL_STATIC_DESTRUCTOR (decl)
2896 && !used_from_object_file_p ()
2897 && !externally_visible);
2898 }
2899
2900 /* Return true when function can be removed from callgraph
2901 if all direct calls are eliminated. */
2902
2903 inline bool
2904 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2905 {
2906 gcc_checking_assert (!global.inlined_to);
2907 /* Instrumentation clones should not be removed before
2908 instrumentation happens. New callers may appear after
2909 instrumentation. */
2910 if (instrumentation_clone
2911 && !chkp_function_instrumented_p (decl))
2912 return false;
2913 /* Extern inlines can always go, we will use the external definition. */
2914 if (DECL_EXTERNAL (decl))
2915 return true;
2916 /* When function is needed, we can not remove it. */
2917 if (force_output || used_from_other_partition)
2918 return false;
2919 if (DECL_STATIC_CONSTRUCTOR (decl)
2920 || DECL_STATIC_DESTRUCTOR (decl))
2921 return false;
2922 /* Only COMDAT functions can be removed if externally visible. */
2923 if (externally_visible
2924 && (!DECL_COMDAT (decl)
2925 || forced_by_abi
2926 || used_from_object_file_p ()))
2927 return false;
2928 return true;
2929 }
2930
2931 /* Verify cgraph, if consistency checking is enabled. */
2932
2933 inline void
2934 cgraph_node::checking_verify_cgraph_nodes (void)
2935 {
2936 if (flag_checking)
2937 cgraph_node::verify_cgraph_nodes ();
2938 }
2939
2940 /* Return true when variable can be removed from variable pool
2941 if all direct calls are eliminated. */
2942
2943 inline bool
2944 varpool_node::can_remove_if_no_refs_p (void)
2945 {
2946 if (DECL_EXTERNAL (decl))
2947 return true;
2948 return (!force_output && !used_from_other_partition
2949 && ((DECL_COMDAT (decl)
2950 && !forced_by_abi
2951 && !used_from_object_file_p ())
2952 || !externally_visible
2953 || DECL_HAS_VALUE_EXPR_P (decl)));
2954 }
2955
2956 /* Return true when all references to variable must be visible in ipa_ref_list.
2957 i.e. if the variable is not externally visible or not used in some magic
2958 way (asm statement or such).
2959 The magic uses are all summarized in force_output flag. */
2960
2961 inline bool
2962 varpool_node::all_refs_explicit_p ()
2963 {
2964 return (definition
2965 && !externally_visible
2966 && !used_from_other_partition
2967 && !force_output);
2968 }
2969
2970 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2971 {
2972 static hashval_t hash (constant_descriptor_tree *);
2973 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2974 };
2975
2976 /* Constant pool accessor function. */
2977 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2978
2979 /* Return node that alias is aliasing. */
2980
2981 inline cgraph_node *
2982 cgraph_node::get_alias_target (void)
2983 {
2984 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2985 }
2986
2987 /* Return node that alias is aliasing. */
2988
2989 inline varpool_node *
2990 varpool_node::get_alias_target (void)
2991 {
2992 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2993 }
2994
2995 /* Walk the alias chain to return the symbol NODE is alias of.
2996 If NODE is not an alias, return NODE.
2997 When AVAILABILITY is non-NULL, get minimal availability in the chain.
2998 When REF is non-NULL, assume that reference happens in symbol REF
2999 when determining the availability. */
3000
3001 inline symtab_node *
3002 symtab_node::ultimate_alias_target (enum availability *availability,
3003 symtab_node *ref)
3004 {
3005 if (!alias)
3006 {
3007 if (availability)
3008 *availability = get_availability (ref);
3009 return this;
3010 }
3011
3012 return ultimate_alias_target_1 (availability, ref);
3013 }
3014
3015 /* Given function symbol, walk the alias chain to return the function node
3016 is alias of. Do not walk through thunks.
3017 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3018 When REF is non-NULL, assume that reference happens in symbol REF
3019 when determining the availability. */
3020
3021 inline cgraph_node *
3022 cgraph_node::ultimate_alias_target (enum availability *availability,
3023 symtab_node *ref)
3024 {
3025 cgraph_node *n = dyn_cast <cgraph_node *>
3026 (symtab_node::ultimate_alias_target (availability, ref));
3027 if (!n && availability)
3028 *availability = AVAIL_NOT_AVAILABLE;
3029 return n;
3030 }
3031
3032 /* For given variable pool node, walk the alias chain to return the function
3033 the variable is alias of. Do not walk through thunks.
3034 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3035 When REF is non-NULL, assume that reference happens in symbol REF
3036 when determining the availability. */
3037
3038 inline varpool_node *
3039 varpool_node::ultimate_alias_target (availability *availability,
3040 symtab_node *ref)
3041 {
3042 varpool_node *n = dyn_cast <varpool_node *>
3043 (symtab_node::ultimate_alias_target (availability, ref));
3044
3045 if (!n && availability)
3046 *availability = AVAIL_NOT_AVAILABLE;
3047 return n;
3048 }
3049
3050 /* Set callee N of call graph edge and add it to the corresponding set of
3051 callers. */
3052
3053 inline void
3054 cgraph_edge::set_callee (cgraph_node *n)
3055 {
3056 prev_caller = NULL;
3057 if (n->callers)
3058 n->callers->prev_caller = this;
3059 next_caller = n->callers;
3060 n->callers = this;
3061 callee = n;
3062 }
3063
3064 /* Redirect callee of the edge to N. The function does not update underlying
3065 call expression. */
3066
3067 inline void
3068 cgraph_edge::redirect_callee (cgraph_node *n)
3069 {
3070 /* Remove from callers list of the current callee. */
3071 remove_callee ();
3072
3073 /* Insert to callers list of the new callee. */
3074 set_callee (n);
3075 }
3076
3077 /* Return true when the edge represents a direct recursion. */
3078
3079 inline bool
3080 cgraph_edge::recursive_p (void)
3081 {
3082 cgraph_node *c = callee->ultimate_alias_target ();
3083 if (caller->global.inlined_to)
3084 return caller->global.inlined_to->decl == c->decl;
3085 else
3086 return caller->decl == c->decl;
3087 }
3088
3089 /* Remove the edge from the list of the callers of the callee. */
3090
3091 inline void
3092 cgraph_edge::remove_callee (void)
3093 {
3094 gcc_assert (!indirect_unknown_callee);
3095 if (prev_caller)
3096 prev_caller->next_caller = next_caller;
3097 if (next_caller)
3098 next_caller->prev_caller = prev_caller;
3099 if (!prev_caller)
3100 callee->callers = next_caller;
3101 }
3102
3103 /* Return true if call must bind to current definition. */
3104
3105 inline bool
3106 cgraph_edge::binds_to_current_def_p ()
3107 {
3108 if (callee)
3109 return callee->binds_to_current_def_p (caller);
3110 else
3111 return false;
3112 }
3113
3114 /* Expected frequency of executions within the function.
3115 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
3116 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
3117
3118 inline int
3119 cgraph_edge::frequency ()
3120 {
3121 return count.to_cgraph_frequency (caller->global.inlined_to
3122 ? caller->global.inlined_to->count
3123 : caller->count);
3124 }
3125
3126
3127 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
3128 static inline bool
3129 decl_is_tm_clone (const_tree fndecl)
3130 {
3131 cgraph_node *n = cgraph_node::get (fndecl);
3132 if (n)
3133 return n->tm_clone;
3134 return false;
3135 }
3136
3137 /* Likewise indicate that a node is needed, i.e. reachable via some
3138 external means. */
3139
3140 inline void
3141 cgraph_node::mark_force_output (void)
3142 {
3143 force_output = 1;
3144 gcc_checking_assert (!global.inlined_to);
3145 }
3146
3147 /* Return true if function should be optimized for size. */
3148
3149 inline bool
3150 cgraph_node::optimize_for_size_p (void)
3151 {
3152 if (opt_for_fn (decl, optimize_size))
3153 return true;
3154 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
3155 return true;
3156 else
3157 return false;
3158 }
3159
3160 /* Return symtab_node for NODE or create one if it is not present
3161 in symtab. */
3162
3163 inline symtab_node *
3164 symtab_node::get_create (tree node)
3165 {
3166 if (TREE_CODE (node) == VAR_DECL)
3167 return varpool_node::get_create (node);
3168 else
3169 return cgraph_node::get_create (node);
3170 }
3171
3172 /* Return availability of NODE when referenced from REF. */
3173
3174 inline enum availability
3175 symtab_node::get_availability (symtab_node *ref)
3176 {
3177 if (is_a <cgraph_node *> (this))
3178 return dyn_cast <cgraph_node *> (this)->get_availability (ref);
3179 else
3180 return dyn_cast <varpool_node *> (this)->get_availability (ref);
3181 }
3182
3183 /* Call calback on symtab node and aliases associated to this node.
3184 When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
3185
3186 inline bool
3187 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
3188 void *),
3189 void *data,
3190 bool include_overwritable)
3191 {
3192 if (include_overwritable
3193 || get_availability () > AVAIL_INTERPOSABLE)
3194 {
3195 if (callback (this, data))
3196 return true;
3197 }
3198 if (has_aliases_p ())
3199 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3200 return false;
3201 }
3202
3203 /* Call callback on function and aliases associated to the function.
3204 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3205 skipped. */
3206
3207 inline bool
3208 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
3209 void *),
3210 void *data,
3211 bool include_overwritable)
3212 {
3213 if (include_overwritable
3214 || get_availability () > AVAIL_INTERPOSABLE)
3215 {
3216 if (callback (this, data))
3217 return true;
3218 }
3219 if (has_aliases_p ())
3220 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3221 return false;
3222 }
3223
3224 /* Call calback on varpool symbol and aliases associated to varpool symbol.
3225 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3226 skipped. */
3227
3228 inline bool
3229 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3230 void *),
3231 void *data,
3232 bool include_overwritable)
3233 {
3234 if (include_overwritable
3235 || get_availability () > AVAIL_INTERPOSABLE)
3236 {
3237 if (callback (this, data))
3238 return true;
3239 }
3240 if (has_aliases_p ())
3241 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3242 return false;
3243 }
3244
3245 /* Return true if refernece may be used in address compare. */
3246
3247 inline bool
3248 ipa_ref::address_matters_p ()
3249 {
3250 if (use != IPA_REF_ADDR)
3251 return false;
3252 /* Addresses taken from virtual tables are never compared. */
3253 if (is_a <varpool_node *> (referring)
3254 && DECL_VIRTUAL_P (referring->decl))
3255 return false;
3256 return referred->address_can_be_compared_p ();
3257 }
3258
3259 /* Build polymorphic call context for indirect call E. */
3260
3261 inline
3262 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3263 {
3264 gcc_checking_assert (e->indirect_info->polymorphic);
3265 *this = e->indirect_info->context;
3266 }
3267
3268 /* Build empty "I know nothing" context. */
3269
3270 inline
3271 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3272 {
3273 clear_speculation ();
3274 clear_outer_type ();
3275 invalid = false;
3276 }
3277
3278 /* Make context non-speculative. */
3279
3280 inline void
3281 ipa_polymorphic_call_context::clear_speculation ()
3282 {
3283 speculative_outer_type = NULL;
3284 speculative_offset = 0;
3285 speculative_maybe_derived_type = false;
3286 }
3287
3288 /* Produce context specifying all derrived types of OTR_TYPE. If OTR_TYPE is
3289 NULL, the context is set to dummy "I know nothing" setting. */
3290
3291 inline void
3292 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3293 {
3294 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3295 offset = 0;
3296 maybe_derived_type = true;
3297 maybe_in_construction = true;
3298 dynamic = true;
3299 }
3300
3301 /* Adjust all offsets in contexts by OFF bits. */
3302
3303 inline void
3304 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3305 {
3306 if (outer_type)
3307 offset += off;
3308 if (speculative_outer_type)
3309 speculative_offset += off;
3310 }
3311
3312 /* Return TRUE if context is fully useless. */
3313
3314 inline bool
3315 ipa_polymorphic_call_context::useless_p () const
3316 {
3317 return (!outer_type && !speculative_outer_type);
3318 }
3319
3320 /* Return true if NODE is local. Instrumentation clones are counted as local
3321 only when original function is local. */
3322
3323 static inline bool
3324 cgraph_local_p (cgraph_node *node)
3325 {
3326 if (!node->instrumentation_clone || !node->instrumented_version)
3327 return node->local.local;
3328
3329 return node->local.local && node->instrumented_version->local.local;
3330 }
3331
3332 /* When using fprintf (or similar), problems can arise with
3333 transient generated strings. Many string-generation APIs
3334 only support one result being alive at once (e.g. by
3335 returning a pointer to a statically-allocated buffer).
3336
3337 If there is more than one generated string within one
3338 fprintf call: the first string gets evicted or overwritten
3339 by the second, before fprintf is fully evaluated.
3340 See e.g. PR/53136.
3341
3342 This function provides a workaround for this, by providing
3343 a simple way to create copies of these transient strings,
3344 without the need to have explicit cleanup:
3345
3346 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3347 xstrdup_for_dump (EXPR_1),
3348 xstrdup_for_dump (EXPR_2));
3349
3350 This is actually a simple wrapper around ggc_strdup, but
3351 the name documents the intent. We require that no GC can occur
3352 within the fprintf call. */
3353
3354 static inline const char *
3355 xstrdup_for_dump (const char *transient_str)
3356 {
3357 return ggc_strdup (transient_str);
3358 }
3359
3360 #endif /* GCC_CGRAPH_H */