1 /* Callgraph handling code.
2 Copyright (C) 2003-2018 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "profile-count.h"
25 #include "ipa-ref.h"
26 #include "plugin-api.h"
27
28 class ipa_opt_pass_d;
29 typedef ipa_opt_pass_d *ipa_opt_pass;
30
31 /* Symbol table consists of functions and variables.
32 TODO: add labels and CONST_DECLs. */
33 enum symtab_type
34 {
35 SYMTAB_SYMBOL,
36 SYMTAB_FUNCTION,
37 SYMTAB_VARIABLE
38 };
39
40 /* Section names are stored as reference-counted strings in a GGC-safe hashtable
41 (to make them survive through PCH). */
42
43 struct GTY((for_user)) section_hash_entry
44 {
45 int ref_count;
46 char *name; /* As long as this data structure stays in GGC, we cannot put
47 the string at the tail of the structure or GGC dies in a
48 horrible way. */
49 };
50
51 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
52 {
53 typedef const char *compare_type;
54
55 static hashval_t hash (section_hash_entry *);
56 static bool equal (section_hash_entry *, const char *);
57 };
58
59 enum availability
60 {
61 /* Not yet set by cgraph_function_body_availability. */
62 AVAIL_UNSET,
63 /* Function body/variable initializer is unknown. */
64 AVAIL_NOT_AVAILABLE,
65 /* Function body/variable initializer is known but might be replaced
66 by a different one from other compilation unit and thus needs to
67 be dealt with care. Like AVAIL_NOT_AVAILABLE it can have
68 arbitrary side effects on escaping variables and functions, while
69 like AVAILABLE it might access static variables. */
70 AVAIL_INTERPOSABLE,
71 /* Function body/variable initializer is known and will be used in final
72 program. */
73 AVAIL_AVAILABLE,
74 /* Function body/variable initializer is known and all its uses are
75 explicitly visible within the current unit (i.e. its address is never taken
76 and it is not exported to other units). Currently used only for functions. */
77 AVAIL_LOCAL
78 };
79
80 /* Classification of symbols WRT partitioning. */
81 enum symbol_partitioning_class
82 {
83 /* External declarations are ignored by partitioning algorithms and they are
84 added into the boundary later via compute_ltrans_boundary. */
85 SYMBOL_EXTERNAL,
86 /* Partitioned symbols are put into one of the partitions. */
87 SYMBOL_PARTITION,
88 /* Duplicated symbols (such as comdat or constant pool references) are
89 copied into every node needing them via add_symbol_to_partition. */
90 SYMBOL_DUPLICATE
91 };
92
93 /* Base of all entries in the symbol table.
94 The symtab_node is inherited by cgraph and varpool nodes. */
95 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
96 chain_next ("%h.next"), chain_prev ("%h.previous")))
97 symtab_node
98 {
99 public:
100 friend class symbol_table;
101
102 /* Return name. */
103 const char *name () const;
104
105 /* Return dump name. */
106 const char *dump_name () const;
107
108 /* Return asm name. */
109 const char *asm_name () const;
110
111 /* Return dump name with assembler name. */
112 const char *dump_asm_name () const;
113
114 /* Add node into symbol table. This function is not used directly, but via
115 cgraph/varpool node creation routines. */
116 void register_symbol (void);
117
118 /* Remove symbol from symbol table. */
119 void remove (void);
120
121 /* Dump symtab node to F. */
122 void dump (FILE *f);
123
124 /* Dump symtab node to stderr. */
125 void DEBUG_FUNCTION debug (void);
126
127 /* Verify consistency of node. */
128 void DEBUG_FUNCTION verify (void);
129
130 /* Return ipa reference from this symtab_node to
131 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
132 of the use. */
133 ipa_ref *create_reference (symtab_node *referred_node,
134 enum ipa_ref_use use_type);
135
136 /* Return ipa reference from this symtab_node to
137 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
138 of the use and STMT the statement (if it exists). */
139 ipa_ref *create_reference (symtab_node *referred_node,
140 enum ipa_ref_use use_type, gimple *stmt);
141
142 /* If VAL is a reference to a function or a variable, add a reference from
143 this symtab_node to the corresponding symbol table node. Return the new
144 reference or NULL if none was created. */
145 ipa_ref *maybe_create_reference (tree val, gimple *stmt);
146
147 /* Clone all references from symtab NODE to this symtab_node. */
148 void clone_references (symtab_node *node);
149
150 /* Clone all referring entries from symtab NODE to this symtab_node. */
154 void clone_referring (symtab_node *node);
155
156 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
157 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
158
159 /* Find the structure describing a reference to REFERRED_NODE
160 and associated with statement STMT. */
161 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
162 unsigned int lto_stmt_uid);
163
164 /* Remove all references that are associated with statement STMT. */
165 void remove_stmt_references (gimple *stmt);
166
167 /* Remove all stmt references in non-speculative references.
168 Those are not maintained during inlining & cloning.
169 The exception are speculative references that are updated along
170 with callgraph edges associated with them. */
171 void clear_stmts_in_references (void);
172
173 /* Remove all references in ref list. */
174 void remove_all_references (void);
175
176 /* Remove all referring items in ref list. */
177 void remove_all_referring (void);
178
179 /* Dump references in ref list to FILE. */
180 void dump_references (FILE *file);
181
182 /* Dump referring in list to FILE. */
183 void dump_referring (FILE *);
184
185 /* Get number of references for this node. */
186 inline unsigned num_references (void)
187 {
188 return ref_list.references ? ref_list.references->length () : 0;
189 }
190
191 /* Iterates I-th reference in the list, REF is also set. */
192 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
193
194 /* Iterates I-th referring item in the list, REF is also set. */
195 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
196
197 /* Iterates I-th referring alias item in the list, REF is also set. */
198 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
199
200 /* Return true if this symtab node and TARGET represent
201 semantically equivalent symbols. */
202 bool semantically_equivalent_p (symtab_node *target);
203
204 /* Classify this symtab node for partitioning. */
205 enum symbol_partitioning_class get_partitioning_class (void);
206
207 /* Return comdat group. */
208 tree get_comdat_group ()
209 {
210 return x_comdat_group;
211 }
212
213 /* Return comdat group as identifier_node. */
214 tree get_comdat_group_id ()
215 {
216 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
217 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
218 return x_comdat_group;
219 }
220
221 /* Set comdat group. */
222 void set_comdat_group (tree group)
223 {
224 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
225 || DECL_P (group));
226 x_comdat_group = group;
227 }
228
229 /* Return section as string. */
230 const char * get_section ()
231 {
232 if (!x_section)
233 return NULL;
234 return x_section->name;
235 }
236
237 /* Remove node from same comdat group. */
238 void remove_from_same_comdat_group (void);
239
240 /* Add this symtab_node to the same comdat group that OLD is in. */
241 void add_to_same_comdat_group (symtab_node *old_node);
242
243 /* Dissolve the same_comdat_group list in which NODE resides. */
244 void dissolve_same_comdat_group_list (void);
245
246 /* Return true when symtab_node is known to be used from other (non-LTO)
247 object file. Known only when doing LTO via linker plugin. */
248 bool used_from_object_file_p (void);
249
250 /* Walk the alias chain to return the symbol NODE is alias of.
251 If NODE is not an alias, return NODE.
252 When AVAILABILITY is non-NULL, get minimal availability in the chain.
253 When REF is non-NULL, assume that reference happens in symbol REF
254 when determining the availability. */
255 symtab_node *ultimate_alias_target (enum availability *avail = NULL,
256 struct symtab_node *ref = NULL);
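/* Illustrative usage sketch (not part of the original header): combining
   ultimate_alias_target with the availability lattice declared above.
   NODE is assumed to be a symtab_node * obtained elsewhere, and
   use_definition_from_this_unit is a hypothetical consumer.

     enum availability avail;
     symtab_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       use_definition_from_this_unit (target);   // hypothetical consumer

   The definition seen in this unit is then the one the final program uses.  */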
257
258 /* Return next reachable static symbol with initializer after NODE. */
259 inline symtab_node *next_defined_symbol (void);
260
261 /* Add reference recording that symtab node is alias of TARGET.
262 If TRANSPARENT is true, make the alias a transparent alias.
263 The function can fail in the case of aliasing cycles; in this case
264 it returns false. */
265 bool resolve_alias (symtab_node *target, bool transparent = false);
266
267 /* The C++ FE sometimes changes linkage flags after producing same
268 body aliases. */
269 void fixup_same_cpp_alias_visibility (symtab_node *target);
270
271 /* Call callback on symtab node and aliases associated to this node.
272 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
273 skipped. */
274 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
275 void *data,
276 bool include_overwrite);
277
278 /* If the node cannot be interposed by the static or dynamic linker to point to
279 a different definition, return this symbol. Otherwise look for an alias with
280 that property and, if none exists, introduce a new one. */
281 symtab_node *noninterposable_alias (void);
282
283 /* Return node that alias is aliasing. */
284 inline symtab_node *get_alias_target (void);
285
286 /* Set section for symbol and its aliases. */
287 void set_section (const char *section);
288
289 /* Set section, do not recurse into aliases.
290 When one wants to change section of symbol and its aliases,
291 use set_section. */
292 void set_section_for_node (const char *section);
293
294 /* Set initialization priority to PRIORITY. */
295 void set_init_priority (priority_type priority);
296
297 /* Return the initialization priority. */
298 priority_type get_init_priority ();
299
300 /* Return availability of NODE when referenced from REF. */
301 enum availability get_availability (symtab_node *ref = NULL);
302
303 /* Return true if NODE binds to current definition in final executable
304 when referenced from REF. If REF is NULL return conservative value
305 for any reference. */
306 bool binds_to_current_def_p (symtab_node *ref = NULL);
307
308 /* Make DECL local. */
309 void make_decl_local (void);
310
311 /* Copy visibility from N. */
312 void copy_visibility_from (symtab_node *n);
313
314 /* Return desired alignment of the definition. This is NOT alignment useful
315 to access THIS, because THIS may be interposable and DECL_ALIGN should
316 be used instead. It however must be guaranteed when outputting the
317 definition of THIS. */
318 unsigned int definition_alignment ();
319
320 /* Return true if alignment can be increased. */
321 bool can_increase_alignment_p ();
322
323 /* Increase alignment of symbol to ALIGN. */
324 void increase_alignment (unsigned int align);
325
326 /* Return true if list contains an alias. */
327 bool has_aliases_p (void);
328
329 /* Return true when the symbol is a real symbol, i.e. it is not an inline clone
330 or an abstract function kept for debug info purposes only. */
331 bool real_symbol_p (void);
332
333 /* Return true when the symbol needs to be output to the LTO symbol table. */
334 bool output_to_lto_symbol_table_p (void);
335
336 /* Determine if the symbol declaration is needed. That is, it is visible
337 either to something outside this translation unit or to something magic in
338 the system configury. This function is used just during symbol creation. */
339 bool needed_p (void);
340
341 /* Return true if this symbol is a function from the C frontend specified
342 directly in RTL form (with "__RTL"). */
343 bool native_rtl_p () const;
344
345 /* Return true when there are references to the node. */
346 bool referred_to_p (bool include_self = true);
347
348 /* Return true if the symbol can be discarded from the binary by the linker.
349 Assume that the symbol is used (so there is no need to take into account
350 garbage collecting linkers).
351
352 This can happen for comdats, commons and weaks when they are prevailed
353 by another definition at static linking time. */
354 inline bool
355 can_be_discarded_p (void)
356 {
357 return (DECL_EXTERNAL (decl)
358 || ((get_comdat_group ()
359 || DECL_COMMON (decl)
360 || (DECL_SECTION_NAME (decl) && DECL_WEAK (decl)))
361 && ((resolution != LDPR_PREVAILING_DEF
362 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP)
363 || flag_incremental_link)
364 && resolution != LDPR_PREVAILING_DEF_IRONLY));
365 }
366
367 /* Return true if NODE is local to a particular COMDAT group, and must not
368 be named from outside the COMDAT. This is used for C++ decloned
369 constructors. */
370 inline bool comdat_local_p (void)
371 {
372 return (same_comdat_group && !TREE_PUBLIC (decl));
373 }
374
375 /* Return true if ONE and TWO are part of the same COMDAT group. */
376 inline bool in_same_comdat_group_p (symtab_node *target);
377
378 /* Return true if symbol is known to be nonzero. */
379 bool nonzero_address ();
380
381 /* Return 0 if the symbol is known to have a different address than S2,
382 return 1 if the symbol is known to have the same address as S2,
383 and return 2 otherwise.
384
385 If MEMORY_ACCESSED is true, assume that the memory pointed to by both THIS
386 and S2 is going to be accessed. This eliminates the situations when
387 either THIS or S2 is NULL and is useful for comparing bases when deciding
388 about memory aliasing. */
389 int equal_address_to (symtab_node *s2, bool memory_accessed = false);
390
391 /* Return true if symbol's address may possibly be compared to other
392 symbol's address. */
393 bool address_matters_p ();
394
395 /* Return true if NODE's address can be compared. This uses properties
396 of NODE only and does not check whether the address is actually taken in
397 an interesting way. For that use ADDRESS_MATTERS_P instead. */
398 bool address_can_be_compared_p (void);
399
400 /* Return symbol table node associated with DECL, if any,
401 and NULL otherwise. */
402 static inline symtab_node *get (const_tree decl)
403 {
404 /* Check that we are called for sane type of object - functions
405 and static or external variables. */
406 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
407 || (TREE_CODE (decl) == VAR_DECL
408 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
409 || in_lto_p)));
410 /* Check that the mapping is sane - perhaps this check can go away,
411 but at the moment frontends tend to corrupt the mapping by calling
412 memcpy/memset on the tree nodes. */
413 gcc_checking_assert (!decl->decl_with_vis.symtab_node
414 || decl->decl_with_vis.symtab_node->decl == decl);
415 return decl->decl_with_vis.symtab_node;
416 }
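/* Illustrative usage sketch (not part of the original header): mapping a
   declaration back to its symbol table entry. FNDECL is assumed to be a
   FUNCTION_DECL that has already been registered.

     symtab_node *snode = symtab_node::get (fndecl);
     if (snode && snode->definition)
       fprintf (stderr, "%s is defined in this unit\n", snode->name ());

   get () returns NULL for decls never entered into the table; use
   get_create () below when the node must exist.  */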
417
418 /* Try to find a symtab node for declaration DECL and if it does not
419 exist or if it corresponds to an inline clone, create a new one. */
420 static inline symtab_node * get_create (tree node);
421
422 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
423 Return NULL if there's no such node. */
424 static symtab_node *get_for_asmname (const_tree asmname);
425
426 /* Verify symbol table for internal consistency. */
427 static DEBUG_FUNCTION void verify_symtab_nodes (void);
428
429 /* Perform internal consistency checks, if they are enabled. */
430 static inline void checking_verify_symtab_nodes (void);
431
432 /* Type of the symbol. */
433 ENUM_BITFIELD (symtab_type) type : 8;
434
435 /* The symbols resolution. */
436 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
437
438 /*** Flags representing the symbol type. ***/
439
440 /* True when the symbol corresponds to a definition in the current unit.
441 Set via finalize_function or finalize_decl. */
442 unsigned definition : 1;
443 /* True when the symbol is an alias.
444 Set by assemble_alias. */
445 unsigned alias : 1;
446 /* When true the alias is translated into its target symbol either by GCC
447 or assembler (it also may just be a duplicate declaration of the same
448 linker name).
449
450 Currently transparent aliases come in three different flavors
451 - aliases having the same assembler name as their target (aka duplicated
452 declarations). In this case the assembler names compare via
453 assembler_names_equal_p and weakref is false
454 - aliases that are renamed at the time they are output to the final file
455 by varasm.c. For those, DECL_ASSEMBLER_NAME has
456 IDENTIFIER_TRANSPARENT_ALIAS set and thus their assembler
457 name must also be unique.
458 Weakrefs belong to this category when we target an assembler without
459 a .weakref directive.
460 - weakrefs that are renamed by the assembler via a .weakref directive.
461 In this case the alias may or may not be a definition (depending on whether
462 the target declaration was seen by the compiler), and weakref is set.
463 Unless we are before renaming statics, assembler names are different.
464
465 Given that we now support duplicate declarations, the second option is
466 redundant and will be removed. */
467 unsigned transparent_alias : 1;
468 /* True when alias is a weakref. */
469 unsigned weakref : 1;
470 /* The C++ frontend produces same body aliases and extra name aliases for
471 virtual functions and vtables that are obviously equivalent.
472 Those aliases are a bit special, especially because the C++ frontend
473 visibility code is so ugly it cannot get them right the first time,
474 so their visibility needs to be copied from their "masters" at
475 the end of parsing. */
476 unsigned cpp_implicit_alias : 1;
477 /* Set once the definition was analyzed. The list of references and
478 other properties are built during analysis. */
479 unsigned analyzed : 1;
480 /* Set for write-only variables. */
481 unsigned writeonly : 1;
482 /* Visibility of symbol was used for further optimization; do not
483 permit further changes. */
484 unsigned refuse_visibility_changes : 1;
485
486 /*** Visibility and linkage flags. ***/
487
488 /* Set when function is visible by other units. */
489 unsigned externally_visible : 1;
490 /* Don't reorder to other symbols having this set. */
491 unsigned no_reorder : 1;
492 /* The symbol will be assumed to be used in an invisible way (like
493 by a toplevel asm statement). */
494 unsigned force_output : 1;
495 /* Like FORCE_OUTPUT, but in this case it is the ABI that requires the symbol
496 to be exported. Unlike FORCE_OUTPUT this flag gets cleared for symbols
497 promoted to static and it does not inhibit optimization. */
498 unsigned forced_by_abi : 1;
499 /* True when the name is known to be unique and thus it does not need mangling. */
500 unsigned unique_name : 1;
501 /* Specify whether the section was set by user or by
502 compiler via -ffunction-sections. */
503 unsigned implicit_section : 1;
504 /* True when body and other characteristics have been removed by
505 symtab_remove_unreachable_nodes. */
506 unsigned body_removed : 1;
507
508 /*** WHOPR Partitioning flags.
509 These flags are used at ltrans stage when only part of the callgraph is
510 available. ***/
511
512 /* Set when variable is used from other LTRANS partition. */
513 unsigned used_from_other_partition : 1;
514 /* Set when function is available in the other LTRANS partition.
515 During WPA output it is used to mark nodes that are present in
516 multiple partitions. */
517 unsigned in_other_partition : 1;
518
519
520
521 /*** other flags. ***/
522
523 /* Set when symbol has address taken. */
524 unsigned address_taken : 1;
525 /* Set when init priority is set. */
526 unsigned in_init_priority_hash : 1;
527
528 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
529 of offloading, for separate compilation for a different target. */
530 unsigned need_lto_streaming : 1;
531
532 /* Set when symbol can be streamed into bytecode for offloading. */
533 unsigned offloadable : 1;
534
535 /* Set when symbol is an IFUNC resolver. */
536 unsigned ifunc_resolver : 1;
537
538
539 /* Ordering of all symtab entries. */
540 int order;
541
542 /* Declaration representing the symbol. */
543 tree decl;
544
545 /* Linked list of symbol table entries starting with symtab_nodes. */
546 symtab_node *next;
547 symtab_node *previous;
548
549 /* Linked list of symbols with the same asm name. There may be multiple
550 entries for a single symbol name during LTO, because symbols are renamed
551 only after partitioning.
552
553 Because inline clones are kept in the assembler name hash, they also
554 produce duplicate entries.
555
556 There are also several long-standing bugs where frontends and builtin
557 code produce duplicated decls. */
558 symtab_node *next_sharing_asm_name;
559 symtab_node *previous_sharing_asm_name;
560
561 /* Circular list of nodes in the same comdat group if non-NULL. */
562 symtab_node *same_comdat_group;
563
564 /* Vectors of referring and referenced entities. */
565 ipa_ref_list ref_list;
566
567 /* Alias target. May be either a DECL pointer or an ASSEMBLER_NAME pointer
568 depending on what was known to the frontend at creation time.
569 Once the alias is resolved, this pointer becomes NULL. */
570 tree alias_target;
571
572 /* File stream where this node is being written to. */
573 struct lto_file_decl_data * lto_file_data;
574
575 PTR GTY ((skip)) aux;
576
577 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
578 tree x_comdat_group;
579
580 /* Section name. Again can be private, if allowed. */
581 section_hash_entry *x_section;
582
583 protected:
584 /* Dump base fields of symtab nodes to F. Not to be used directly. */
585 void dump_base (FILE *);
586
587 /* Verify common part of symtab node. */
588 bool DEBUG_FUNCTION verify_base (void);
589
590 /* Remove node from symbol table. This function is not used directly, but via
591 cgraph/varpool node removal routines. */
592 void unregister (void);
593
594 /* Return the initialization and finalization priority information for
595 DECL. If there is no previous priority information, a freshly
596 allocated structure is returned. */
597 struct symbol_priority_map *priority_info (void);
598
599 /* Worker for call_for_symbol_and_aliases. */
600 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
601 void *data,
602 bool include_overwrite);
603 private:
604 /* Worker for set_section. */
605 static bool set_section (symtab_node *n, void *s);
606
607 /* Worker for symtab_resolve_alias. */
608 static bool set_implicit_section (symtab_node *n, void *);
609
610 /* Worker searching noninterposable alias. */
611 static bool noninterposable_alias (symtab_node *node, void *data);
612
613 /* Worker for ultimate_alias_target. */
614 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
615 symtab_node *ref = NULL);
616
617 /* Get dump name with normal or assembly name. */
618 const char *get_dump_name (bool asm_name_p) const;
619 };
620
621 inline void
622 symtab_node::checking_verify_symtab_nodes (void)
623 {
624 if (flag_checking)
625 symtab_node::verify_symtab_nodes ();
626 }
627
628 /* Walk all aliases for NODE. */
629 #define FOR_EACH_ALIAS(node, alias) \
630 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
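/* Illustrative usage sketch (not part of the original header): NODE is
   assumed to be a symtab_node * (or cgraph_node *); the second argument
   iterates over the ipa_ref entries and ref->referring is the aliasing
   symbol itself.

     ipa_ref *ref;
     FOR_EACH_ALIAS (node, ref)
       fprintf (stderr, "alias: %s\n", ref->referring->name ());
   */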
631
632 /* This is the information that is put into the cgraph local structure
633 to recover a function. */
634 struct lto_file_decl_data;
635
636 extern const char * const cgraph_availability_names[];
637 extern const char * const ld_plugin_symbol_resolution_names[];
638 extern const char * const tls_model_names[];
639
640 /* Sub-structure of cgraph_node. Holds information about thunk, used only for
641 same body aliases.
642
643 Thunks are basically wrappers around methods which are introduced in case
644 of multiple inheritance in order to adjust the value of the "this" pointer
645 or of the returned value.
646
647 In the case of this-adjusting thunks, each back-end can override the
648 can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
649 (with a tail call for instance) directly as assembly. For the default hook
650 or for the case where the can_output_mi_thunk hooks return false, the thunk
651 is gimplified and lowered using the regular machinery. */
652
653 struct GTY(()) cgraph_thunk_info {
654 /* Offset used to adjust "this". */
655 HOST_WIDE_INT fixed_offset;
656
657 /* Offset in the virtual table to get the offset to adjust "this". Valid iff
658 VIRTUAL_OFFSET_P is true. */
659 HOST_WIDE_INT virtual_value;
660
661 /* Thunk target, i.e. the method that this thunk wraps. Depending on the
662 TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
663 tree alias;
664
665 /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
666 thunk. */
667 bool this_adjusting;
668
669 /* If true, this thunk is what we call a virtual thunk. In this case:
670 * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
671 done, add to the result the offset found in the vtable at:
672 vptr + VIRTUAL_VALUE
673 * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
674 the virtual one. */
675 bool virtual_offset_p;
676
677 /* ??? True for special kind of thunks, seems related to instrumentation. */
678 bool add_pointer_bounds_args;
679
680 /* Set to true when the alias node (the cgraph_node to which this struct
681 belongs) is a thunk. Access to any other field is invalid if this is false. */
682 bool thunk_p;
683 };
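/* Illustrative C++ example (not part of the original header): a hierarchy
   that typically makes the front end emit a this-adjusting thunk. Calling
   D::f through a B2 * requires adjusting "this" from the B2 subobject back
   to the enclosing D object, which is the FIXED_OFFSET/VIRTUAL_VALUE
   adjustment described by cgraph_thunk_info above.

     struct B1 { virtual void f (); };
     struct B2 { virtual void f (); };
     struct D : B1, B2 { void f (); };   // D::f gets a thunk for the B2 base
   */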
684
685 /* Information about the function collected locally.
686 Available after function is analyzed. */
687
688 struct GTY(()) cgraph_local_info {
689 /* Set when function is visible in current compilation unit only and
690 its address is never taken. */
691 unsigned local : 1;
692
693 /* False when there is something that makes versioning impossible. */
694 unsigned versionable : 1;
695
696 /* False when the function calling convention and signature cannot be changed.
697 This is the case when __builtin_apply_args is used. */
698 unsigned can_change_signature : 1;
699
700 /* True when the function has been originally extern inline, but it is
701 redefined now. */
702 unsigned redefined_extern_inline : 1;
703
704 /* True if the function may enter serial irrevocable mode. */
705 unsigned tm_may_enter_irr : 1;
706 };
707
708 /* Information about the function that needs to be computed globally
709 once compilation is finished. Available only with -funit-at-a-time. */
710
711 struct GTY(()) cgraph_global_info {
712 /* For inline clones this points to the function they will be
713 inlined into. */
714 cgraph_node *inlined_to;
715 };
716
717 /* Represent which DECL tree (or reference to such tree)
718 will be replaced by another tree while versioning. */
719 struct GTY(()) ipa_replace_map
720 {
721 /* The tree that will be replaced. */
722 tree old_tree;
723 /* The new (replacing) tree. */
724 tree new_tree;
725 /* Parameter number to replace, when old_tree is NULL. */
726 int parm_num;
727 /* True when a substitution should be done, false otherwise. */
728 bool replace_p;
729 /* True when we replace a reference to old_tree. */
730 bool ref_p;
731 };
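/* Illustrative sketch (not part of the original header): an ipa_replace_map
   as an IPA pass might build it when creating a virtual clone, asking the
   clone to substitute the constant 4 for the first parameter.

     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;    // identify the parameter by number instead
     map->new_tree = build_int_cst (integer_type_node, 4);
     map->parm_num = 0;
     map->replace_p = true;
     map->ref_p = false;
   */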
732
733 struct GTY(()) cgraph_clone_info
734 {
735 vec<ipa_replace_map *, va_gc> *tree_map;
736 bitmap args_to_skip;
737 bitmap combined_args_to_skip;
738 };
739
740 enum cgraph_simd_clone_arg_type
741 {
742 SIMD_CLONE_ARG_TYPE_VECTOR,
743 SIMD_CLONE_ARG_TYPE_UNIFORM,
744 /* These are only for integer/pointer arguments passed by value. */
745 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
746 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
747 /* These 6 are only for reference type arguments or arguments passed
748 by reference. */
749 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
750 SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP,
751 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
752 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP,
753 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
754 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP,
755 SIMD_CLONE_ARG_TYPE_MASK
756 };
757
758 /* Function arguments in the original function of a SIMD clone.
759 Supplementary data for `struct simd_clone'. */
760
761 struct GTY(()) cgraph_simd_clone_arg {
762 /* Original function argument as it originally existed in
763 DECL_ARGUMENTS. */
764 tree orig_arg;
765
766 /* orig_arg's type (or, for extern functions, the type from
767 TYPE_ARG_TYPES). */
768 tree orig_type;
769
770 /* If argument is a vector, this holds the vector version of
771 orig_arg that after adjusting the argument types will live in
772 DECL_ARGUMENTS. Otherwise, this is NULL.
773
774 This basically holds:
775 vector(simdlen) __typeof__(orig_arg) new_arg. */
776 tree vector_arg;
777
778 /* vector_arg's type (or, for extern functions, the new vector type). */
779 tree vector_type;
780
781 /* If argument is a vector, this holds the array where the simd
782 argument is held while executing the simd clone function. This
783 is a local variable in the cloned function. Its content is
784 copied from vector_arg upon entry to the clone.
785
786 This basically holds:
787 __typeof__(orig_arg) simd_array[simdlen]. */
788 tree simd_array;
789
790 /* A SIMD clone's argument can be either linear (constant or
791 variable), uniform, or vector. */
792 enum cgraph_simd_clone_arg_type arg_type;
793
794 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
795 the constant linear step, if arg_type is
796 SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of
797 the uniform argument holding the step, otherwise 0. */
798 HOST_WIDE_INT linear_step;
799
800 /* Variable alignment if available, otherwise 0. */
801 unsigned int alignment;
802 };
803
804 /* Specific data for a SIMD function clone. */
805
806 struct GTY(()) cgraph_simd_clone {
807 /* Number of words in the SIMD lane associated with this clone. */
808 unsigned int simdlen;
809
810 /* Number of annotated function arguments in `args'. This is
811 usually the number of named arguments in FNDECL. */
812 unsigned int nargs;
813
814 /* Max hardware vector size in bits for integral vectors. */
815 unsigned int vecsize_int;
816
817 /* Max hardware vector size in bits for floating point vectors. */
818 unsigned int vecsize_float;
819
820 /* Machine mode of the mask argument(s), if they are to be passed
821 as bitmasks in integer argument(s). VOIDmode if masks are passed
822 as vectors of characteristic type. */
823 machine_mode mask_mode;
824
825 /* The mangling character for a given vector size. This is used
826 to determine the ISA mangling bit as specified in the Intel
827 Vector ABI. */
828 unsigned char vecsize_mangle;
829
830 /* True if this is the masked, in-branch version of the clone,
831 otherwise false. */
832 unsigned int inbranch : 1;
833
834 /* Doubly linked list of SIMD clones. */
835 cgraph_node *prev_clone, *next_clone;
836
837 /* Original cgraph node the SIMD clones were created for. */
838 cgraph_node *origin;
839
840 /* Annotated function arguments for the original function. */
841 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
842 };
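/* Illustrative example (not part of the original header): a declaration that
   makes GCC create SIMD clones described by cgraph_simd_clone (requires
   -fopenmp or -fopenmp-simd). Each clone gets its own simdlen and
   vecsize_mangle, and one cgraph_simd_clone_arg per named parameter; here X
   becomes a vector argument and N a uniform one.

     #pragma omp declare simd uniform (n)
     int scale (int x, int n);
   */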
843
844 /* Function Multiversioning info. */
845 struct GTY((for_user)) cgraph_function_version_info {
846 /* The cgraph_node for which the function version info is stored. */
847 cgraph_node *this_node;
848 /* Chains all the semantically identical function versions. The
849 first function in this chain is the version_info node of the
850 default function. */
851 cgraph_function_version_info *prev;
852 /* If this version node corresponds to a dispatcher for function
853 versions, this points to the version info node of the default
854 function, the first node in the chain. */
855 cgraph_function_version_info *next;
856 /* If this node corresponds to a function version, this points
857 to the dispatcher function decl, which is the function that must
858 be called to execute the right function version at run-time.
859
860 If this cgraph node is a dispatcher (if dispatcher_function is
861 true, in the cgraph_node struct) for function versions, this
862 points to resolver function, which holds the function body of the
863 dispatcher. The dispatcher decl is an alias to the resolver
864 function decl. */
865 tree dispatcher_resolver;
866 };
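/* Illustrative example (not part of the original header): function
   multiversioning as accepted by the C++ front end. Each version gets a
   cgraph_function_version_info record chained off the default version, and
   the implicitly created dispatcher/resolver picks a version at run time.

     __attribute__ ((target ("default"))) int popcount_impl (unsigned x);
     __attribute__ ((target ("popcnt")))  int popcount_impl (unsigned x);
   */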
867
868 #define DEFCIFCODE(code, type, string) CIF_ ## code,
869 /* Reasons for inlining failures. */
870
871 enum cgraph_inline_failed_t {
872 #include "cif-code.def"
873 CIF_N_REASONS
874 };
875
876 enum cgraph_inline_failed_type_t
877 {
878 CIF_FINAL_NORMAL = 0,
879 CIF_FINAL_ERROR
880 };
881
882 struct cgraph_edge;
883
884 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
885 {
886 typedef gimple *compare_type;
887
888 static hashval_t hash (cgraph_edge *);
889 static hashval_t hash (gimple *);
890 static bool equal (cgraph_edge *, gimple *);
891 };
892
893 /* The cgraph data structure.
894 Each function decl has assigned cgraph_node listing callees and callers. */
895
896 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
897 public:
898 friend class symbol_table;
899
900 /* Remove the node from the cgraph and all inline clones inlined into it.
901 However, skip removal of FORBIDDEN_NODE and return true if it needs to be
902 removed. This allows the function to be called from an outer loop walking
903 the clone tree. */
904 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
905
906 /* Record all references from cgraph_node that are taken
907 in statement STMT. */
908 void record_stmt_references (gimple *stmt);
909
910 /* Like cgraph_set_call_stmt but walk the clone tree and update all
911 clones sharing the same function body.
912 When UPDATE_SPECULATIVE is true, all three components of
913 the speculative edge get updated. Otherwise we update only the direct
914 call. */
915 void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
916 bool update_speculative = true);
917
918 /* Walk the alias chain to return the function cgraph_node is alias of.
919 Walk through thunk, too.
920 When AVAILABILITY is non-NULL, get minimal availability in the chain.
921 When REF is non-NULL, assume that reference happens in symbol REF
922 when determining the availability. */
923 cgraph_node *function_symbol (enum availability *avail = NULL,
924 struct symtab_node *ref = NULL);
925
926 /* Walk the alias chain to return the function cgraph_node is alias of.
927 Walk through non virtual thunks, too. Thus we return either a function
928 or a virtual thunk node.
929 When AVAILABILITY is non-NULL, get minimal availability in the chain.
930 When REF is non-NULL, assume that reference happens in symbol REF
931 when determining the availability. */
932 cgraph_node *function_or_virtual_thunk_symbol
933 (enum availability *avail = NULL,
934 struct symtab_node *ref = NULL);
935
936 /* Create node representing clone of N executed COUNT times. Decrease
937 the execution counts from original node too.
938 The new clone will have decl set to DECL that may or may not be the same
939 as decl of N.
940
941 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
942 function's profile to reflect the fact that part of execution is handled
943 by node.
944 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
945 the new clone. Otherwise the caller is responsible for doing so later.
946
947 If the new node is being inlined into another one, NEW_INLINED_TO should be
948 the outline function the new one is (even indirectly) inlined to.
949 All hooks will see this in node's global.inlined_to, when invoked.
950 Can be NULL if the node is not inlined. SUFFIX is a string that is appended
951 to the original name. */
952 cgraph_node *create_clone (tree decl, profile_count count,
953 bool update_original,
954 vec<cgraph_edge *> redirect_callers,
955 bool call_duplication_hook,
956 cgraph_node *new_inlined_to,
957 bitmap args_to_skip, const char *suffix = NULL);
958
959 /* Create callgraph node clone with new declaration. The actual body will
960 be copied later at compilation stage. */
961 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
962 vec<ipa_replace_map *, va_gc> *tree_map,
963 bitmap args_to_skip, const char * suffix);
964
965 /* The cgraph node is being removed from the symbol table; see if its entry
966 can be replaced by another inline clone. */
967 cgraph_node *find_replacement (void);
968
969 /* Create a new cgraph node which is the new version of
970 callgraph node. REDIRECT_CALLERS holds the callers
971 edges which should be redirected to point to
972 NEW_VERSION. All the callee edges of the node
973 are cloned to the new version node. Return the new
974 version node.
975
976 If non-NULL, BBS_TO_COPY determines which basic blocks
977 are copied to prevent duplication of calls that are dead
978 in the clone.
979
980 SUFFIX is a string that is appended to the original name. */
981
982 cgraph_node *create_version_clone (tree new_decl,
983 vec<cgraph_edge *> redirect_callers,
984 bitmap bbs_to_copy,
985 const char *suffix = NULL);
986
987 /* Perform function versioning.
988 Function versioning includes copying of the tree and
989 a callgraph update (creating a new cgraph node and updating
990 its callees and callers).
991
992 REDIRECT_CALLERS varray includes the edges to be redirected
993 to the new version.
994
995 TREE_MAP is a mapping of tree nodes we want to replace with
996 new ones (according to results of prior analysis).
997
998 If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
999 from the new version.
1000 If SKIP_RETURN is true, the new version will return void.
1001 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
1002 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
1003
1004 Return the new version's cgraph node. */
1005 cgraph_node *create_version_clone_with_body
1006 (vec<cgraph_edge *> redirect_callers,
1007 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
1008 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
1009 const char *clone_name);
1010
1011 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
1012 corresponding to cgraph_node. */
1013 cgraph_function_version_info *insert_new_function_version (void);
1014
1015 /* Get the cgraph_function_version_info node corresponding to node. */
1016 cgraph_function_version_info *function_version (void);
1017
1018 /* Discover all functions and variables that are trivially needed, and analyze
1019 them as well as all functions and variables referred to by them. */
1020 void analyze (void);
1021
1022 /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
1023 aliases DECL with adjustments made to the first parameter.
1024 See comments in struct cgraph_thunk_info for details on the parameters. */
1025 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
1026 HOST_WIDE_INT fixed_offset,
1027 HOST_WIDE_INT virtual_value,
1028 tree virtual_offset,
1029 tree real_alias);
1030
1031
1032 /* Return node that alias is aliasing. */
1033 inline cgraph_node *get_alias_target (void);
1034
1035 /* Given a function symbol, walk the alias chain to return the function the
1036 node is an alias of. Do not walk through thunks.
1037 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1038 When REF is non-NULL, assume that reference happens in symbol REF
1039 when determining the availability. */
1040
1041 cgraph_node *ultimate_alias_target (availability *availability = NULL,
1042 symtab_node *ref = NULL);
1043
1044 /* Expand thunk NODE to gimple if possible.
1045 When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
1046 no assembler is produced.
1047 When OUTPUT_ASM_THUNKS is true, also produce assembler for
1048 thunks that are not lowered.
1049 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
1050
1051 /* Call expand_thunk on all callers that are thunks and analyze those
1052 nodes that were expanded. */
1053 void expand_all_artificial_thunks ();
1054
1055 /* Assemble thunks and aliases associated to node. */
1056 void assemble_thunks_and_aliases (void);
1057
1058 /* Expand function specified by node. */
1059 void expand (void);
1060
1061 /* As a GCC extension we allow redefinition of the function. The
1062 semantics when the two bodies differ are not well defined.
1063 We replace the old body with new body so in unit at a time mode
1064 we always use new body, while in normal mode we may end up with
1065 old body inlined into some functions and new body expanded and
1066 inlined in others. */
1067 void reset (void);
1068
1069 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
1070 kind of wrapper method. */
1071 void create_wrapper (cgraph_node *target);
1072
1073 /* Verify consistency of the cgraph node. */
1074 void DEBUG_FUNCTION verify_node (void);
1075
1076 /* Remove function from symbol table. */
1077 void remove (void);
1078
1079 /* Dump call graph node to file F. */
1080 void dump (FILE *f);
1081
1082 /* Dump call graph node to stderr. */
1083 void DEBUG_FUNCTION debug (void);
1084
1085 /* When doing LTO, read cgraph_node's body from disk if it is not already
1086 present. */
1087 bool get_untransformed_body (void);
1088
1089 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
1090 if it is not already present. When some IPA transformations are scheduled,
1091 apply them. */
1092 bool get_body (void);
1093
1094 /* Release memory used to represent body of function.
1095 Use this only for functions that are released before being translated to
1096 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1097 are free'd in final.c via free_after_compilation(). */
1098 void release_body (bool keep_arguments = false);
1099
1100 /* Return the DECL_STRUCT_FUNCTION of the function. */
1101 struct function *get_fun (void);
1102
1103 /* cgraph_node is no longer nested function; update cgraph accordingly. */
1104 void unnest (void);
1105
1106 /* Bring cgraph node local. */
1107 void make_local (void);
1108
1109 /* Likewise indicate that a node has its address taken. */
1110 void mark_address_taken (void);
1111
1112 /* Set finalization priority to PRIORITY. */
1113 void set_fini_priority (priority_type priority);
1114
1115 /* Return the finalization priority. */
1116 priority_type get_fini_priority (void);
1117
1118 /* Create edge from a given function to CALLEE in the cgraph. */
1119 cgraph_edge *create_edge (cgraph_node *callee,
1120 gcall *call_stmt, profile_count count);
1121
1122 /* Create an indirect edge with a yet-undetermined callee where the call
1123 statement destination is a formal parameter of the caller with index
1124 PARAM_INDEX. */
1125 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1126 profile_count count,
1127 bool compute_indirect_info = true);
1128
1129 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
1130 the same function body. If clones already have an edge for OLD_STMT, only
1131 update the edge the same way cgraph_set_call_stmt_including_clones does. */
1132 void create_edge_including_clones (cgraph_node *callee,
1133 gimple *old_stmt, gcall *stmt,
1134 profile_count count,
1135 cgraph_inline_failed_t reason);
1136
1137 /* Return the callgraph edge representing the GIMPLE_CALL statement
1138 CALL_STMT. */
1139 cgraph_edge *get_edge (gimple *call_stmt);
1140
1141 /* Collect all callers of cgraph_node and its aliases that are known to lead
1142 to NODE (i.e. are not overwritable) and that are not thunks. */
1143 vec<cgraph_edge *> collect_callers (void);
1144
1145 /* Remove all callers from the node. */
1146 void remove_callers (void);
1147
1148 /* Remove all callees from the node. */
1149 void remove_callees (void);
1150
1151 /* Return function availability. See cgraph.h for description of individual
1152 return values. */
1153 enum availability get_availability (symtab_node *ref = NULL);
1154
1155 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1156 if any to NOTHROW. */
1157 bool set_nothrow_flag (bool nothrow);
1158
1159 /* Set DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node,
1160 if any. */
1161 bool set_malloc_flag (bool malloc_p);
1162
1163 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
1164 If SET_CONST is false, clear the flag.
1165
1166 When setting the flag be careful about possible interposition:
1167 do not set the flag for functions that can be interposed, and set the pure
1168 flag instead for functions that can bind to another definition.
1169
1170 Return true if any change was done. */
1171
1172 bool set_const_flag (bool set_const, bool looping);
1173
1174 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1175 if any to PURE.
1176
1177 When setting the flag, be careful about possible interposition.
1178 Return true if any change was done. */
1179
1180 bool set_pure_flag (bool pure, bool looping);
1181
1182 /* Call callback on function and aliases associated to the function.
1183 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1184 skipped. */
1185
1186 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1187 void *),
1188 void *data, bool include_overwritable);
1189
1190 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1191 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1192 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1193 skipped. */
1194 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1195 void *data),
1196 void *data,
1197 bool include_overwritable,
1198 bool exclude_virtual_thunks = false);
1199
1200 /* Likewise indicate that a node is needed, i.e. reachable via some
1201 external means. */
1202 inline void mark_force_output (void);
1203
1204 /* Return true when function can be marked local. */
1205 bool local_p (void);
1206
1207 /* Return true if cgraph_node can be made local for API change.
1208 Extern inline functions and C++ COMDAT functions can be made local
1209 at the expense of possible code size growth if function is used in multiple
1210 compilation units. */
1211 bool can_be_local_p (void);
1212
1213 /* Return true when cgraph_node can not return or throw and thus
1214 it is safe to ignore its side effects for IPA analysis. */
1215 bool cannot_return_p (void);
1216
1217 /* Return true when function cgraph_node and all its aliases are only called
1218 directly.
1219 i.e. it is not externally visible, address was not taken and
1220 it is not used in any other non-standard way. */
1221 bool only_called_directly_p (void);
1222
1223 /* Return true when function is only called directly or it has alias.
1224 i.e. it is not externally visible, address was not taken and
1225 it is not used in any other non-standard way. */
1226 inline bool only_called_directly_or_aliased_p (void);
1227
1228 /* Return true when function cgraph_node can be expected to be removed
1229 from program when direct calls in this compilation unit are removed.
1230
1231 As a special case COMDAT functions are
1232 cgraph_can_remove_if_no_direct_calls_p while they are not
1233 cgraph_only_called_directly_p (it is possible they are called from another
1234 unit).
1235
1236 This function behaves as cgraph_only_called_directly_p because eliminating
1237 all uses of COMDAT function does not make it necessarily disappear from
1238 the program unless we are compiling whole program or we do LTO. In this
1239 case we know we win since dynamic linking will not really discard the
1240 linkonce section.
1241
1242 If WILL_INLINE is true, assume that function will be inlined into all the
1243 direct calls. */
1244 bool will_be_removed_from_program_if_no_direct_calls_p
1245 (bool will_inline = false);
1246
1247 /* Return true when function can be removed from callgraph
1248 if all direct calls and references are eliminated. The function does
1249 not take into account comdat groups. */
1250 bool can_remove_if_no_direct_calls_and_refs_p (void);
1251
1252 /* Return true when function cgraph_node and its aliases can be removed from
1253 callgraph if all direct calls are eliminated.
1254 If WILL_INLINE is true, assume that function will be inlined into all the
1255 direct calls. */
1256 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1257
1258 /* Return true when callgraph node is a function with Gimple body defined
1259 in the current unit. Functions can also be defined externally or they
1260 can be thunks with no Gimple representation.
1261
1262 Note that at WPA stage, the function body may not be present in memory. */
1263 inline bool has_gimple_body_p (void);
1264
1265 /* Return true if function should be optimized for size. */
1266 bool optimize_for_size_p (void);
1267
1268 /* Dump the callgraph to file F. */
1269 static void dump_cgraph (FILE *f);
1270
1271 /* Dump the call graph to stderr. */
1272 static inline
1273 void debug_cgraph (void)
1274 {
1275 dump_cgraph (stderr);
1276 }
1277
1278 /* Get unique identifier of the node. */
1279 inline int get_uid ()
1280 {
1281 return m_uid;
1282 }
1283
1284 /* Record that DECL1 and DECL2 are semantically identical function
1285 versions. */
1286 static void record_function_versions (tree decl1, tree decl2);
1287
1288 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1289 DECL is a duplicate declaration. */
1290 static void delete_function_version_by_decl (tree decl);
1291
1292 /* Add the function FNDECL to the call graph.
1293 Unlike finalize_function, this function is intended to be used
1294 by middle end and allows insertion of new function at arbitrary point
1295 of compilation. The function can be either in high, low or SSA form
1296 GIMPLE.
1297
1298 The function is assumed to be reachable and have address taken (so no
1299 API breaking optimizations are performed on it).
1300
1301 The main work done by this function is to enqueue the function for later
1302 processing, to avoid the need for the passes to be re-entrant. */
1303 static void add_new_function (tree fndecl, bool lowered);
1304
1305 /* Return callgraph node for given symbol and check it is a function. */
1306 static inline cgraph_node *get (const_tree decl)
1307 {
1308 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1309 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1310 }
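/* Illustrative usage sketch (not part of the original header): obtaining the
   call graph node for a FUNCTION_DECL and walking its outgoing call edges.
   FNDECL is assumed to be a FUNCTION_DECL.

     cgraph_node *node = cgraph_node::get_create (fndecl);
     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       fprintf (stderr, "calls %s\n", e->callee->name ());

   get () returns NULL when no node exists yet, while get_create () always
   returns one.  */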
1311
1312 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1313 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1314 have the garbage collector run at the moment. We would need to either
1315 create a new GC context, or just not compile right now. */
1316 static void finalize_function (tree, bool);
1317
1318 /* Return cgraph node assigned to DECL. Create new one when needed. */
1319 static cgraph_node * create (tree decl);
1320
1321 /* Try to find a call graph node for declaration DECL and if it does not
1322 exist or if it corresponds to an inline clone, create a new one. */
1323 static cgraph_node * get_create (tree);
1324
1325 /* Return local info for the compiled function. */
1326 static cgraph_local_info *local_info (tree decl);
1327
1328 /* Return RTL info for the compiled function. */
1329 static struct cgraph_rtl_info *rtl_info (tree);
1330
1331 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1332 Return NULL if there's no such node. */
1333 static cgraph_node *get_for_asmname (tree asmname);
1334
1335 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1336 successful and NULL otherwise.
1337 Same body aliases are output whenever the body of DECL is output,
1338 and cgraph_node::get (ALIAS) transparently
1339 returns cgraph_node::get (DECL). */
1340 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1341
1342 /* Verify whole cgraph structure. */
1343 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1344
1345 /* Verify cgraph, if consistency checking is enabled. */
1346 static inline void checking_verify_cgraph_nodes (void);
1347
1348 /* Worker to bring NODE local. */
1349 static bool make_local (cgraph_node *node, void *);
1350
1351 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node representing
1352 the function the body is associated
1353 with (not necessarily cgraph_node (DECL)). */
1354 static cgraph_node *create_alias (tree alias, tree target);
1355
1356 /* Return true if NODE has thunk. */
1357 static bool has_thunk_p (cgraph_node *node, void *);
1358
1359 cgraph_edge *callees;
1360 cgraph_edge *callers;
1361 /* List of edges representing indirect calls with a yet undetermined
1362 callee. */
1363 cgraph_edge *indirect_calls;
1364 /* For nested functions points to function the node is nested in. */
1365 cgraph_node *origin;
1366 /* Points to first nested function, if any. */
1367 cgraph_node *nested;
1368 /* Pointer to the next function with same origin, if any. */
1369 cgraph_node *next_nested;
1370 /* Pointer to the next clone. */
1371 cgraph_node *next_sibling_clone;
1372 cgraph_node *prev_sibling_clone;
1373 cgraph_node *clones;
1374 cgraph_node *clone_of;
1375 /* For functions with many call sites it holds a map from the call statement
1376 to the edge, to speed up the get_edge function. */
1377 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1378 /* Declaration this node used to be a clone of. */
1379 tree former_clone_of;
1380
1381 /* If this is a SIMD clone, this points to the SIMD specific
1382 information for it. */
1383 cgraph_simd_clone *simdclone;
1384 /* If this function has SIMD clones, this points to the first clone. */
1385 cgraph_node *simd_clones;
1386
1387 /* Interprocedural passes scheduled to have their transform functions
1388 applied next time we execute local pass on them. We maintain it
1389 per-function in order to allow IPA passes to introduce new functions. */
1390 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1391
1392 cgraph_local_info local;
1393 cgraph_global_info global;
1394 struct cgraph_rtl_info *rtl;
1395 cgraph_clone_info clone;
1396 cgraph_thunk_info thunk;
1397
1398 /* Expected number of executions: calculated in profile.c. */
1399 profile_count count;
1400 /* How to scale counts at materialization time; used to merge
1401 LTO units with different number of profile runs. */
1402 int count_materialization_scale;
1403 /* ID assigned by the profiling. */
1404 unsigned int profile_id;
1405 /* Time profiler: first run of function. */
1406 int tp_first_run;
1407
1408 /* Set when decl is an abstract function pointed to by the
1409 ABSTRACT_DECL_ORIGIN of a reachable function. */
1410 unsigned used_as_abstract_origin : 1;
1411 /* Set once the function is lowered (i.e. its CFG is built). */
1412 unsigned lowered : 1;
1413 /* Set once the function has been instantiated and its callee
1414 lists created. */
1415 unsigned process : 1;
1416 /* How commonly executed the node is. Initialized during branch
1417 probabilities pass. */
1418 ENUM_BITFIELD (node_frequency) frequency : 2;
1419 /* True when function can only be called at startup (from static ctor). */
1420 unsigned only_called_at_startup : 1;
1421 /* True when function can only be called at startup (from static dtor). */
1422 unsigned only_called_at_exit : 1;
1423 /* True when function is the transactional clone of a function which
1424 is called only from inside transactions. */
1425 /* ?? We should be able to remove this. We have enough bits in
1426 cgraph to calculate it. */
1427 unsigned tm_clone : 1;
1428 /* True if this decl is a dispatcher for function versions. */
1429 unsigned dispatcher_function : 1;
1430 /* True if this decl calls a COMDAT-local function. This is set up in
1431 compute_fn_summary and inline_call. */
1432 unsigned calls_comdat_local : 1;
1433 /* True if node has been created by merge operation in IPA-ICF. */
1434 unsigned icf_merged: 1;
1435 /* True if call to node can't result in a call to free, munmap or
1436 other operation that could make previously non-trapping memory
1437 accesses trapping. */
1438 unsigned nonfreeing_fn : 1;
1439 /* True if there were multiple COMDAT bodies merged by lto-symtab. */
1440 unsigned merged_comdat : 1;
1441 /* True if function was created to be executed in parallel. */
1442 unsigned parallelized_function : 1;
1443 /* True if function is part split out by ipa-split. */
1444 unsigned split_part : 1;
1445 /* True if the function appears as possible target of indirect call. */
1446 unsigned indirect_call_target : 1;
1447
1448 private:
1449 /* Unique id of the node. */
1450 int m_uid;
1451
1452 /* Worker for call_for_symbol_and_aliases. */
1453 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1454 void *),
1455 void *data, bool include_overwritable);
1456 };
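/* Illustrative sketch: the clone-related pointers above form a tree of clones
   linked through CLONES, NEXT_SIBLING_CLONE and CLONE_OF.  Walking the direct
   clones of a node NODE might look like:

     for (cgraph_node *c = node->clones; c; c = c->next_sibling_clone)
       gcc_checking_assert (c->clone_of == node);  */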
1457
1458 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1459 can appear in multiple sets. */
1460 struct cgraph_node_set_def
1461 {
1462 hash_map<cgraph_node *, size_t> *map;
1463 vec<cgraph_node *> nodes;
1464 };
1465
1466 typedef cgraph_node_set_def *cgraph_node_set;
1467 typedef struct varpool_node_set_def *varpool_node_set;
1468
1469 class varpool_node;
1470
1471 /* A varpool node set is a collection of varpool nodes. A varpool node
1472 can appear in multiple sets. */
1473 struct varpool_node_set_def
1474 {
1475 hash_map<varpool_node *, size_t> * map;
1476 vec<varpool_node *> nodes;
1477 };
1478
1479 /* Iterator structure for cgraph node sets. */
1480 struct cgraph_node_set_iterator
1481 {
1482 cgraph_node_set set;
1483 unsigned index;
1484 };
1485
1486 /* Iterator structure for varpool node sets. */
1487 struct varpool_node_set_iterator
1488 {
1489 varpool_node_set set;
1490 unsigned index;
1491 };
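/* Illustrative sketch: a node set can be traversed directly through its NODES
   vector; the MAP field records each node's index within that vector.
   Assuming SET is a valid cgraph_node_set:

     for (unsigned i = 0; i < set->nodes.length (); i++)
       {
         cgraph_node *node = set->nodes[i];
         node->dump (stderr);
       }  */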
1492
1493 /* Context of a polymorphic call. It represents information about the type of
1494 the instance that may reach the call. This is used by the ipa-devirt walkers
1495 of the type inheritance graph. */
1496
1497 class GTY(()) ipa_polymorphic_call_context {
1498 public:
1499 /* The called object appears in an object of type OUTER_TYPE
1500 at offset OFFSET. When information is not 100% reliable, we
1501 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1502 HOST_WIDE_INT offset;
1503 HOST_WIDE_INT speculative_offset;
1504 tree outer_type;
1505 tree speculative_outer_type;
1506 /* True if outer object may be in construction or destruction. */
1507 unsigned maybe_in_construction : 1;
1508 /* True if outer object may be of derived type. */
1509 unsigned maybe_derived_type : 1;
1510 /* True if speculative outer object may be of derived type. We always
1511 speculate that construction does not happen. */
1512 unsigned speculative_maybe_derived_type : 1;
1513 /* True if the context is invalid and all calls should be redirected
1514 to BUILTIN_UNREACHABLE. */
1515 unsigned invalid : 1;
1516 /* True if the outer type is dynamic. */
1517 unsigned dynamic : 1;
1518
1519 /* Build empty "I know nothing" context. */
1520 ipa_polymorphic_call_context ();
1521 /* Build polymorphic call context for indirect call E. */
1522 ipa_polymorphic_call_context (cgraph_edge *e);
1523 /* Build polymorphic call context for IP invariant CST.
1524 If specified, OTR_TYPE specify the type of polymorphic call
1525 that takes CST+OFFSET as a parameter. */
1526 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1527 HOST_WIDE_INT offset = 0);
1528 /* Build context for pointer REF contained in FNDECL at statement STMT.
1529 If INSTANCE is non-NULL, return a pointer to the object described by
1530 the context. */
1531 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1532 tree *instance = NULL);
1533
1534 /* Look for vtable stores or constructor calls to work out dynamic type
1535 of memory location. */
1536 bool get_dynamic_type (tree, tree, tree, gimple *);
1537
1538 /* Make context non-speculative. */
1539 void clear_speculation ();
1540
1541 /* Produce a context specifying all derived types of OTR_TYPE. If OTR_TYPE is
1542 NULL, the context is set to the dummy "I know nothing" setting. */
1543 void clear_outer_type (tree otr_type = NULL);
1544
1545 /* Walk container types and modify context to point to actual class
1546 containing OTR_TYPE (if non-NULL) as base class.
1547 Return true if resulting context is valid.
1548
1549 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1550 valid only via allocation of a new polymorphic type inside the object by
1551 means of placement new.
1552
1553 When CONSIDER_BASES is false, only look for actual fields, not base types
1554 of TYPE. */
1555 bool restrict_to_inner_class (tree otr_type,
1556 bool consider_placement_new = true,
1557 bool consider_bases = true);
1558
1559 /* Adjust all offsets in contexts by given number of bits. */
1560 void offset_by (HOST_WIDE_INT);
1561 /* Use when we cannot track dynamic type change. This speculatively assumes
1562 that the type change is not happening. */
1563 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1564 /* Assume that both THIS and a given context are valid and strengthen THIS
1565 if possible. Return true if any strengthening was made.
1566 If the actual type the context is being used in is known, OTR_TYPE should be
1567 set accordingly. This improves the quality of the combined result. */
1568 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1569 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1570
1571 /* Return TRUE if context is fully useless. */
1572 bool useless_p () const;
1573 /* Return TRUE if this context conveys the same information as X. */
1574 bool equal_to (const ipa_polymorphic_call_context &x) const;
1575
1576 /* Dump human readable context to F. If NEWLINE is true, it will be
1577 terminated by a newline. */
1578 void dump (FILE *f, bool newline = true) const;
1579 void DEBUG_FUNCTION debug () const;
1580
1581 /* LTO streaming. */
1582 void stream_out (struct output_block *) const;
1583 void stream_in (struct lto_input_block *, struct data_in *data_in);
1584
1585 private:
1586 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1587 bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1588 void set_by_decl (tree, HOST_WIDE_INT);
1589 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1590 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1591 void make_speculative (tree otr_type = NULL);
1592 };
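/* Illustrative sketch: given an indirect polymorphic call edge E, a typical
   use of the context above is to build it, check that it carries useful
   information and then query or dump it:

     ipa_polymorphic_call_context ctx (e);
     if (!ctx.useless_p ())
       ctx.dump (stderr);  */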
1593
1594 /* Structure containing additional information about an indirect call. */
1595
1596 struct GTY(()) cgraph_indirect_call_info
1597 {
1598 /* When agg_contents is set, the offset where the call pointer is located
1599 within the aggregate. */
1600 HOST_WIDE_INT offset;
1601 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1602 ipa_polymorphic_call_context context;
1603 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1604 HOST_WIDE_INT otr_token;
1605 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1606 tree otr_type;
1607 /* Index of the parameter that is called. */
1608 int param_index;
1609 /* ECF flags determined from the caller. */
1610 int ecf_flags;
1611 /* Profile_id of the common target obtained from the profile. */
1612 int common_target_id;
1613 /* Probability that call will land in function with COMMON_TARGET_ID. */
1614 int common_target_probability;
1615
1616 /* Set when the call is a virtual call with the parameter being the
1617 associated object pointer rather than a simple direct call. */
1618 unsigned polymorphic : 1;
1619 /* Set when the call is a call of a pointer loaded from contents of an
1620 aggregate at offset. */
1621 unsigned agg_contents : 1;
1622 /* Set when this is a call through a member pointer. */
1623 unsigned member_ptr : 1;
1624 /* When the agg_contents bit is set, this one determines whether the
1625 destination is loaded from a parameter passed by reference. */
1626 unsigned by_ref : 1;
1627 /* When the agg_contents bit is set, this one determines whether we can
1628 deduce from the function body that the loaded value from the reference is
1629 never modified between the invocation of the function and the load
1630 point. */
1631 unsigned guaranteed_unmodified : 1;
1632 /* For polymorphic calls this specifies whether the virtual table pointer
1633 may have changed in between function entry and the call. */
1634 unsigned vptr_changed : 1;
1635 };
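/* Illustrative sketch: for an indirect call edge E, the information above
   might be inspected as follows (process_virtual_call and
   process_aggregate_load are hypothetical helpers):

     cgraph_indirect_call_info *ii = e->indirect_info;
     if (ii->polymorphic)
       process_virtual_call (ii->otr_type, ii->otr_token, ii->context);
     else if (ii->agg_contents)
       process_aggregate_load (ii->offset, ii->by_ref);  */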
1636
1637 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1638 for_user)) cgraph_edge {
1639 friend class cgraph_node;
1640 friend class symbol_table;
1641
1642 /* Remove the edge in the cgraph. */
1643 void remove (void);
1644
1645 /* Change field call_stmt of edge to NEW_STMT.
1646 If UPDATE_SPECULATIVE and E is any component of speculative
1647 edge, then update all components. */
1648 void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1649
1650 /* Redirect callee of the edge to N. The function does not update underlying
1651 call expression. */
1652 void redirect_callee (cgraph_node *n);
1653
1654 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1655 create one or more equivalent thunks for N and redirect E to the first in
1656 the chain. Note that it is then necessary to call
1657 n->expand_all_artificial_thunks once all callers are redirected. */
1658 void redirect_callee_duplicating_thunks (cgraph_node *n);
1659
1660 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1661 CALLEE. */
1664 cgraph_edge *make_direct (cgraph_node *callee);
1665
1666 /* Turn the edge into a speculative call to N2. Update the profile so the
1667 direct call is taken DIRECT_COUNT times. */
1669 cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);
1670
1671 /* Given speculative call edge, return all three components. */
1672 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1673 ipa_ref *&reference);
1674
1675 /* Speculative call edge turned out to be a direct call to CALLEE_DECL.
1676 Remove the speculative call sequence and return the edge representing the call.
1677 It is up to caller to redirect the call as appropriate. */
1678 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1679
1680 /* If necessary, change the function declaration in the call statement
1681 associated with the edge so that it corresponds to the edge callee. */
1682 gimple *redirect_call_stmt_to_callee (void);
1683
1684 /* Create a clone of the edge in the callgraph, placed in node N and
1685 represented by CALL_STMT. */
1686 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1687 profile_count num, profile_count den,
1688 bool update_original);
1689
1690 /* Verify edge count and frequency. */
1691 bool verify_count ();
1692
1693 /* Return true when the call of the edge cannot lead to return from the caller
1694 and thus it is safe to ignore its side effects for IPA analysis
1695 when computing side effects of the caller. */
1696 bool cannot_lead_to_return_p (void);
1697
1698 /* Return true when the edge represents a direct recursion. */
1699 bool recursive_p (void);
1700
1701 /* Return true if the call can be hot. */
1702 bool maybe_hot_p (void);
1703
1704 /* Get unique identifier of the edge. */
1705 inline int get_uid ()
1706 {
1707 return m_uid;
1708 }
1709
1710 /* Rebuild cgraph edges for current function node. This needs to be run after
1711 passes that don't update the cgraph. */
1712 static unsigned int rebuild_edges (void);
1713
1714 /* Rebuild cgraph references for current function node. This needs to be run
1715 after passes that don't update the cgraph. */
1716 static void rebuild_references (void);
1717
1718 /* Expected number of executions: calculated in profile.c. */
1719 profile_count count;
1720 cgraph_node *caller;
1721 cgraph_node *callee;
1722 cgraph_edge *prev_caller;
1723 cgraph_edge *next_caller;
1724 cgraph_edge *prev_callee;
1725 cgraph_edge *next_callee;
1726 gcall *call_stmt;
1727 /* Additional information about an indirect call. Not cleared when an edge
1728 becomes direct. */
1729 cgraph_indirect_call_info *indirect_info;
1730 PTR GTY ((skip (""))) aux;
1731 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1732 explanation why function was not inlined. */
1733 enum cgraph_inline_failed_t inline_failed;
1734 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1735 when the function is serialized in. */
1736 unsigned int lto_stmt_uid;
1737 /* Whether this edge was made direct by indirect inlining. */
1738 unsigned int indirect_inlining_edge : 1;
1739 /* Whether this edge describes an indirect call with an undetermined
1740 callee. */
1741 unsigned int indirect_unknown_callee : 1;
1742 /* Whether this edge is still a dangling */
1743 /* True if the corresponding CALL stmt cannot be inlined. */
1744 unsigned int call_stmt_cannot_inline_p : 1;
1745 /* Can this call throw externally? */
1746 unsigned int can_throw_external : 1;
1747 /* Edges with the SPECULATIVE flag represent indirect calls that were
1748 speculatively turned into direct calls (i.e. by profile feedback).
1749 The final code sequence will have form:
1750
1751 if (call_target == expected_fn)
1752 expected_fn ();
1753 else
1754 call_target ();
1755
1756 Every speculative call is represented by three components attached
1757 to the same call statement:
1758 1) a direct call (to expected_fn)
1759 2) an indirect call (to call_target)
1760 3) an IPA_REF_ADDR reference to expected_fn.
1761
1762 Optimizers may later redirect the direct call to a clone, so 1) and 3)
1763 do not necessarily need to agree on the destination. */
1764 unsigned int speculative : 1;
1765 /* Set to true when caller is a constructor or destructor of polymorphic
1766 type. */
1767 unsigned in_polymorphic_cdtor : 1;
1768
1769 /* Return true if call must bind to current definition. */
1770 bool binds_to_current_def_p ();
1771
1772 /* Expected frequency of executions within the function.
1773 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1774 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1775 int frequency ();
1776
1777 /* Expected frequency of executions within the function. */
1778 sreal sreal_frequency ();
1779 private:
1780 /* Unique id of the edge. */
1781 int m_uid;
1782
1783 /* Remove the edge from the list of the callers of the callee. */
1784 void remove_caller (void);
1785
1786 /* Remove the edge from the list of the callees of the caller. */
1787 void remove_callee (void);
1788
1789 /* Set callee N of call graph edge and add it to the corresponding set of
1790 callers. */
1791 void set_callee (cgraph_node *n);
1792
1793 /* Output flags of edge to a file F. */
1794 void dump_edge_flags (FILE *f);
1795
1796 /* Verify that call graph edge corresponds to DECL from the associated
1797 statement. Return true if the verification should fail. */
1798 bool verify_corresponds_to_fndecl (tree decl);
1799 };
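/* Illustrative sketch: the three components of a speculative edge described
   above can be retrieved and, once the speculation is confirmed, collapsed
   back into an ordinary direct edge (EXPECTED_DECL stands for whatever
   declaration the caller expects the speculation to resolve to):

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     e->speculative_call_info (direct, indirect, ref);
     if (direct->callee->decl == expected_decl)
       e = e->resolve_speculation (expected_decl);  */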
1800
1801 #define CGRAPH_FREQ_BASE 1000
1802 #define CGRAPH_FREQ_MAX 100000
1803
1804 /* The varpool data structure.
1805 Each static variable decl has assigned varpool_node. */
1806
1807 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1808 public:
1809 /* Dump given varpool node to F. */
1810 void dump (FILE *f);
1811
1812 /* Dump given varpool node to stderr. */
1813 void DEBUG_FUNCTION debug (void);
1814
1815 /* Remove variable from symbol table. */
1816 void remove (void);
1817
1818 /* Remove node initializer when it is no longer needed. */
1819 void remove_initializer (void);
1820
1821 void analyze (void);
1822
1823 /* Return variable availability. */
1824 availability get_availability (symtab_node *ref = NULL);
1825
1826 /* When doing LTO, read variable's constructor from disk if
1827 it is not already present. */
1828 tree get_constructor (void);
1829
1830 /* Return true if variable has constructor that can be used for folding. */
1831 bool ctor_useable_for_folding_p (void);
1832
1833 /* For the given variable pool node, walk the alias chain to return the
1834 variable it is an alias of. Do not walk through thunks.
1835 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1836 When REF is non-NULL, assume that reference happens in symbol REF
1837 when determining the availability. */
1838 inline varpool_node *ultimate_alias_target
1839 (availability *availability = NULL, symtab_node *ref = NULL);
1840
1841 /* Return node that alias is aliasing. */
1842 inline varpool_node *get_alias_target (void);
1843
1844 /* Output one variable, if necessary. Return whether we output it. */
1845 bool assemble_decl (void);
1846
1847 /* For variables in named sections make sure get_variable_section
1848 is called before we switch to those sections. Then section
1849 conflicts between read-only and read-only requiring relocations
1850 sections can be resolved. */
1851 void finalize_named_section_flags (void);
1852
1853 /* Call callback on the varpool symbol and aliases associated with it.
1854 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1855 skipped. */
1856 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1857 void *data,
1858 bool include_overwritable);
1859
1860 /* Return true when variable should be considered externally visible. */
1861 bool externally_visible_p (void);
1862
1863 /* Return true when all references to variable must be visible
1864 in ipa_ref_list.
1865 i.e. if the variable is not externally visible or not used in some magic
1866 way (asm statement or such).
1867 The magic uses are all summarized in force_output flag. */
1868 inline bool all_refs_explicit_p ();
1869
1870 /* Return true when variable can be removed from variable pool
1871 if all direct references are eliminated. */
1872 inline bool can_remove_if_no_refs_p (void);
1873
1874 /* Add the variable DECL to the varpool.
1875 Unlike finalize_decl, this function is intended to be used
1876 by the middle end and allows insertion of new variables at an arbitrary
1877 point of compilation. */
1878 static void add (tree decl);
1879
1880 /* Return varpool node for given symbol and check it is a variable. */
1881 static inline varpool_node *get (const_tree decl);
1882
1883 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1884 instructs the middle end to output the variable to the asm file, if it is
1885 needed or externally visible. */
1886 static void finalize_decl (tree decl);
1887
1888 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1889 Extra name aliases are output whenever DECL is output. */
1890 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1891
1892 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1893 Extra name aliases are output whenever DECL is output. */
1894 static varpool_node * create_alias (tree, tree);
1895
1896 /* Dump the variable pool to F. */
1897 static void dump_varpool (FILE *f);
1898
1899 /* Dump the variable pool to stderr. */
1900 static void DEBUG_FUNCTION debug_varpool (void);
1901
1902 /* Allocate a new varpool node and insert it into basic data structures. */
1903 static varpool_node *create_empty (void);
1904
1905 /* Return varpool node assigned to DECL. Create new one when needed. */
1906 static varpool_node *get_create (tree decl);
1907
1908 /* Given an assembler name, lookup node. */
1909 static varpool_node *get_for_asmname (tree asmname);
1910
1911 /* Set when variable is scheduled to be assembled. */
1912 unsigned output : 1;
1913
1914 /* Set when variable has a statically initialized pointer
1915 or is a static bounds variable and needs initialization. */
1916 unsigned need_bounds_init : 1;
1917
1918 /* Set if the variable is dynamically initialized, except for
1919 function local statics. */
1920 unsigned dynamically_initialized : 1;
1921
1922 ENUM_BITFIELD(tls_model) tls_model : 3;
1923
1924 /* Set if the variable is known to be used by a single function only.
1925 This is computed by the ipa_single_use pass and used by late optimizations
1926 in places where the optimization would be valid for a local static variable
1927 if we did not do any inter-procedural code movement. */
1928 unsigned used_by_single_function : 1;
1929
1930 private:
1931 /* Assemble thunks and aliases associated to varpool node. */
1932 void assemble_aliases (void);
1933
1934 /* Worker for call_for_node_and_aliases. */
1935 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1936 void *data,
1937 bool include_overwritable);
1938 };
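/* Illustrative sketch: a front end that has created a new static variable
   declaration typically hands it to the varpool roughly like this (the decl
   construction below is only an example):

     tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                             get_identifier ("example_var"), integer_type_node);
     TREE_STATIC (decl) = 1;
     varpool_node::finalize_decl (decl);
     varpool_node *vnode = varpool_node::get (decl);  */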
1939
1940 /* Every top level asm statement is put into an asm_node. */
1941
1942 struct GTY(()) asm_node {
1943
1944
1945 /* Next asm node. */
1946 asm_node *next;
1947 /* String for this asm node. */
1948 tree asm_str;
1949 /* Ordering of all cgraph nodes. */
1950 int order;
1951 };
1952
1953 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1954
1955 template <>
1956 template <>
1957 inline bool
1958 is_a_helper <cgraph_node *>::test (symtab_node *p)
1959 {
1960 return p && p->type == SYMTAB_FUNCTION;
1961 }
1962
1963 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1964
1965 template <>
1966 template <>
1967 inline bool
1968 is_a_helper <varpool_node *>::test (symtab_node *p)
1969 {
1970 return p && p->type == SYMTAB_VARIABLE;
1971 }
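/* Illustrative sketch: with the helpers above, code can dispatch on the kind
   of a symtab_node NODE like this:

     if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
       cn->dump (stderr);
     else if (varpool_node *vn = dyn_cast <varpool_node *> (node))
       vn->dump (stderr);  */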
1972
1973 /* Macros to access the next item in the list of free cgraph nodes and
1974 edges. */
1975 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1976 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1977 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1978
1979 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1980 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1981 typedef void (*varpool_node_hook)(varpool_node *, void *);
1982 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1983 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1984
1985 struct cgraph_edge_hook_list;
1986 struct cgraph_node_hook_list;
1987 struct varpool_node_hook_list;
1988 struct cgraph_2edge_hook_list;
1989 struct cgraph_2node_hook_list;
1990
1991 /* Map from a symbol to initialization/finalization priorities. */
1992 struct GTY(()) symbol_priority_map {
1993 priority_type init;
1994 priority_type fini;
1995 };
1996
1997 enum symtab_state
1998 {
1999 /* Frontend is parsing and finalizing functions. */
2000 PARSING,
2001 /* Callgraph is being constructed. It is safe to add new functions. */
2002 CONSTRUCTION,
2003 /* Callgraph is being streamed-in at LTO time. */
2004 LTO_STREAMING,
2005 /* Callgraph is built and early IPA passes are being run. */
2006 IPA,
2007 /* Callgraph is built and all functions are transformed to SSA form. */
2008 IPA_SSA,
2009 /* All inline decisions are done; it is now possible to remove extern inline
2010 functions and virtual call targets. */
2011 IPA_SSA_AFTER_INLINING,
2012 /* Functions are now ordered and being passed to RTL expanders. */
2013 EXPANSION,
2014 /* All cgraph expansion is done. */
2015 FINISHED
2016 };
2017
2018 struct asmname_hasher : ggc_ptr_hash <symtab_node>
2019 {
2020 typedef const_tree compare_type;
2021
2022 static hashval_t hash (symtab_node *n);
2023 static bool equal (symtab_node *n, const_tree t);
2024 };
2025
2026 class GTY((tag ("SYMTAB"))) symbol_table
2027 {
2028 public:
2029 friend class symtab_node;
2030 friend class cgraph_node;
2031 friend class cgraph_edge;
2032
2033 symbol_table (): cgraph_max_uid (1), edges_max_uid (1)
2034 {
2035 }
2036
2037 /* Initialize callgraph dump file. */
2038 void initialize (void);
2039
2040 /* Register a top-level asm statement ASM_STR. */
2041 inline asm_node *finalize_toplevel_asm (tree asm_str);
2042
2043 /* Analyze the whole compilation unit once it is parsed completely. */
2044 void finalize_compilation_unit (void);
2045
2046 /* The C++ frontend produces same-body aliases all over the place, even before
2047 PCH gets streamed out. It relies on us linking the aliases with their
2048 functions in order to do the fixups, but ipa-ref is not PCH safe.
2049 Consequently, we first produce aliases without links, and once the C++ FE is
2050 sure it won't stream PCH we build the links via this function. */
2051 void process_same_body_aliases (void);
2052
2053 /* Perform simple optimizations based on callgraph. */
2054 void compile (void);
2055
2056 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
2057 functions into the callgraph so that they look like ordinary reachable
2058 functions inserted into the callgraph already at construction time. */
2059 void process_new_functions (void);
2060
2061 /* Once all functions from the compilation unit are in memory, produce all
2062 clones and update all calls. We might also do this on demand if we don't
2063 want to bring all functions to memory prior to compilation, but the current
2064 WHOPR implementation does that and it is a bit easier to keep everything
2065 right in this order. */
2066 void materialize_all_clones (void);
2067
2068 /* Register a symbol NODE. */
2069 inline void register_symbol (symtab_node *node);
2070
2071 inline void
2072 clear_asm_symbols (void)
2073 {
2074 asmnodes = NULL;
2075 asm_last_node = NULL;
2076 }
2077
2078 /* Perform reachability analysis and reclaim all unreachable nodes. */
2079 bool remove_unreachable_nodes (FILE *file);
2080
2081 /* Optimization of function bodies might've rendered some variables
2082 unnecessary, so we want to avoid compiling them. Re-do
2083 reachability starting from variables that are either externally visible
2084 or were referred to from the asm output routines. */
2085 void remove_unreferenced_decls (void);
2086
2087 /* Unregister a symbol NODE. */
2088 inline void unregister (symtab_node *node);
2089
2090 /* Allocate new callgraph node and insert it into basic data structures. */
2091 cgraph_node *create_empty (void);
2092
2093 /* Release a callgraph NODE. */
2094 void release_symbol (cgraph_node *node);
2095
2096 /* Output all variables enqueued to be assembled. */
2097 bool output_variables (void);
2098
2099 /* Weakrefs may be associated with external decls and thus not output
2100 at expansion time. Emit all necessary aliases. */
2101 void output_weakrefs (void);
2102
2103 /* Return first symbol in the symbol table. */
2104 inline symtab_node *first_symbol (void);
2105
2106 /* Return first assembler symbol. */
2107 inline asm_node *
2108 first_asm_symbol (void)
2109 {
2110 return asmnodes;
2111 }
2112
2113 /* Return first static symbol with definition. */
2114 inline symtab_node *first_defined_symbol (void);
2115
2116 /* Return first variable. */
2117 inline varpool_node *first_variable (void);
2118
2119 /* Return next variable after NODE. */
2120 inline varpool_node *next_variable (varpool_node *node);
2121
2122 /* Return first static variable with initializer. */
2123 inline varpool_node *first_static_initializer (void);
2124
2125 /* Return next static variable with initializer after NODE. */
2126 inline varpool_node *next_static_initializer (varpool_node *node);
2127
2128 /* Return first static variable with definition. */
2129 inline varpool_node *first_defined_variable (void);
2130
2131 /* Return next static variable with definition after NODE. */
2132 inline varpool_node *next_defined_variable (varpool_node *node);
2133
2134 /* Return first function with body defined. */
2135 inline cgraph_node *first_defined_function (void);
2136
2137 /* Return next function with body defined after NODE. */
2138 inline cgraph_node *next_defined_function (cgraph_node *node);
2139
2140 /* Return first function. */
2141 inline cgraph_node *first_function (void);
2142
2143 /* Return next function. */
2144 inline cgraph_node *next_function (cgraph_node *node);
2145
2146 /* Return first function with a gimple body. */
2147 cgraph_node *first_function_with_gimple_body (void);
2148
2149 /* Return next function with a gimple body after NODE. */
2150 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2151
2152 /* Register HOOK to be called with DATA on each removed edge. */
2153 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2154 void *data);
2155
2156 /* Remove ENTRY from the list of hooks called on removing edges. */
2157 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2158
2159 /* Register HOOK to be called with DATA on each removed node. */
2160 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2161 void *data);
2162
2163 /* Remove ENTRY from the list of hooks called on removing nodes. */
2164 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2165
2166 /* Register HOOK to be called with DATA on each removed node. */
2167 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2168 void *data);
2169
2170 /* Remove ENTRY from the list of hooks called on removing nodes. */
2171 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2172
2173 /* Register HOOK to be called with DATA on each inserted node. */
2174 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2175 void *data);
2176
2177 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2178 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2179
2180 /* Register HOOK to be called with DATA on each inserted node. */
2181 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2182 void *data);
2183
2184 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2185 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2186
2187 /* Register HOOK to be called with DATA on each duplicated edge. */
2188 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2189 void *data);
2190 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2191 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2192
2193 /* Register HOOK to be called with DATA on each duplicated node. */
2194 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2195 void *data);
2196
2197 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2198 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2199
2200 /* Call all edge removal hooks. */
2201 void call_edge_removal_hooks (cgraph_edge *e);
2202
2203 /* Call all node insertion hooks. */
2204 void call_cgraph_insertion_hooks (cgraph_node *node);
2205
2206 /* Call all node removal hooks. */
2207 void call_cgraph_removal_hooks (cgraph_node *node);
2208
2209 /* Call all node duplication hooks. */
2210 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2211
2212 /* Call all edge duplication hooks. */
2213 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2214
2215 /* Call all node removal hooks. */
2216 void call_varpool_removal_hooks (varpool_node *node);
2217
2218 /* Call all node insertion hooks. */
2219 void call_varpool_insertion_hooks (varpool_node *node);
2220
2221 /* Arrange node to be first in its entry of assembler_name_hash. */
2222 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2223
2224 /* Initialize the asm name hash unless it is already initialized. */
2225 void symtab_initialize_asm_name_hash (void);
2226
2227 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2228 void change_decl_assembler_name (tree decl, tree name);
2229
2230 /* Dump symbol table to F. */
2231 void dump (FILE *f);
2232
2233 /* Dump symbol table to stderr. */
2234 void DEBUG_FUNCTION debug (void);
2235
2236 /* Return true if assembler names NAME1 and NAME2 lead to the same symbol
2237 name. */
2238 static bool assembler_names_equal_p (const char *name1, const char *name2);
2239
2240 int cgraph_count;
2241 int cgraph_max_uid;
2242
2243 int edges_count;
2244 int edges_max_uid;
2245
2246 symtab_node* GTY(()) nodes;
2247 asm_node* GTY(()) asmnodes;
2248 asm_node* GTY(()) asm_last_node;
2249 cgraph_node* GTY(()) free_nodes;
2250
2251 /* Head of a linked list of unused (freed) call graph edges.
2252 Do not GTY((delete)) this list so UIDs get reliably recycled. */
2253 cgraph_edge * GTY(()) free_edges;
2254
2255 /* The order index of the next symtab node to be created. This is
2256 used so that we can sort the cgraph nodes in order by when we saw
2257 them, to support -fno-toplevel-reorder. */
2258 int order;
2259
2260 /* Set when whole unit has been analyzed so we can access global info. */
2261 bool global_info_ready;
2262 /* What state callgraph is in right now. */
2263 enum symtab_state state;
2264 /* Set when the cgraph is fully built and the basic flags are computed. */
2265 bool function_flags_ready;
2266
2267 bool cpp_implicit_aliases_done;
2268
2269 /* Hash table used to hold sections. */
2270 hash_table<section_name_hasher> *GTY(()) section_hash;
2271
2272 /* Hash table used to convert assembler names into nodes. */
2273 hash_table<asmname_hasher> *assembler_name_hash;
2274
2275 /* Hash table used to hold init priorities. */
2276 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2277
2278 FILE* GTY ((skip)) dump_file;
2279
2280 /* Return symbol used to separate symbol name from suffix. */
2281 static char symbol_suffix_separator ();
2282
2283 FILE* GTY ((skip)) ipa_clones_dump_file;
2284
2285 hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
2286
2287 private:
2288 /* Allocate new callgraph node. */
2289 inline cgraph_node * allocate_cgraph_symbol (void);
2290
2291 /* Allocate a cgraph_edge structure and fill it with data according to the
2292 parameters of which only CALLEE can be NULL (when creating an indirect call
2293 edge). */
2294 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2295 gcall *call_stmt, profile_count count,
2296 bool indir_unknown_callee);
2297
2298 /* Put the edge onto the free list. */
2299 void free_edge (cgraph_edge *e);
2300
2301 /* Insert NODE to assembler name hash. */
2302 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2303
2304 /* Remove NODE from assembler name hash. */
2305 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2306
2307 /* Hash asmnames ignoring the user specified marks. */
2308 static hashval_t decl_assembler_name_hash (const_tree asmname);
2309
2310 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2311 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2312
2313 friend struct asmname_hasher;
2314
2315 /* List of hooks triggered when an edge is removed. */
2316 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2317 /* List of hooks triggered when a cgraph node is removed. */
2318 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2319 /* List of hooks triggered when an edge is duplicated. */
2320 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2321 /* List of hooks triggered when a node is duplicated. */
2322 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2323 /* List of hooks triggered when a function is inserted. */
2324 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2325 /* List of hooks triggered when a variable is inserted. */
2326 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2327 /* List of hooks triggered when a node is removed. */
2328 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2329 };
2330
2331 extern GTY(()) symbol_table *symtab;
2332
2333 extern vec<cgraph_node *> cgraph_new_nodes;
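/* Illustrative sketch: an IPA pass that keeps per-node data usually registers
   removal hooks on SYMTAB so the data can be dropped when nodes disappear
   (note_removal is a hypothetical callback):

     static void note_removal (cgraph_node *node, void *data);

     cgraph_node_hook_list *hook
       = symtab->add_cgraph_removal_hook (note_removal, NULL);
     ...
     symtab->remove_cgraph_removal_hook (hook);  */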
2334
2335 inline hashval_t
2336 asmname_hasher::hash (symtab_node *n)
2337 {
2338 return symbol_table::decl_assembler_name_hash
2339 (DECL_ASSEMBLER_NAME (n->decl));
2340 }
2341
2342 inline bool
2343 asmname_hasher::equal (symtab_node *n, const_tree t)
2344 {
2345 return symbol_table::decl_assembler_name_equal (n->decl, t);
2346 }
2347
2348 /* In cgraph.c */
2349 void cgraph_c_finalize (void);
2350 void release_function_body (tree);
2351 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2352
2353 void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2354 bool cgraph_function_possibly_inlined_p (tree);
2355
2356 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2357 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2358
2359 extern bool gimple_check_call_matching_types (gimple *, tree, bool);
2360
2361 /* In cgraphunit.c */
2362 void cgraphunit_c_finalize (void);
2363
2364 /* Initialize data structures so DECL is a function in lowered gimple form.
2365 IN_SSA is true if the gimple is in SSA. */
2366 basic_block init_lowered_empty_function (tree, bool, profile_count);
2367
2368 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
2369 /* In cgraphclones.c */
2370
2371 tree clone_function_name_1 (const char *, const char *);
2372 tree clone_function_name (tree decl, const char *);
2373
2374 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2375 bool, bitmap, bool, bitmap, basic_block);
2376
2377 void dump_callgraph_transformation (const cgraph_node *original,
2378 const cgraph_node *clone,
2379 const char *suffix);
2380 tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
2381 bool skip_return);
2382
2383 /* In cgraphbuild.c */
2384 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2385 void record_references_in_initializer (tree, bool);
2386
2387 /* In ipa.c */
2388 void cgraph_build_static_cdtor (char which, tree body, int priority);
2389 bool ipa_discover_readonly_nonaddressable_vars (void);
2390
2391 /* In varpool.c */
2392 tree ctor_for_folding (tree);
2393
2394 /* In ipa-inline-analysis.c */
2395 void initialize_inline_failed (struct cgraph_edge *);
2396 bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
2397
2398 /* Return true when the symbol is a real symbol, i.e. it is not an inline clone
2399 or an abstract function kept for debug info purposes only. */
2400 inline bool
2401 symtab_node::real_symbol_p (void)
2402 {
2403 cgraph_node *cnode;
2404
2405 if (DECL_ABSTRACT_P (decl))
2406 return false;
2407 if (transparent_alias && definition)
2408 return false;
2409 if (!is_a <cgraph_node *> (this))
2410 return true;
2411 cnode = dyn_cast <cgraph_node *> (this);
2412 if (cnode->global.inlined_to)
2413 return false;
2414 return true;
2415 }
2416
2417 /* Return true if DECL should have an entry in the symbol table if used.
2418 Those are functions and static & external variables. */
2419
2420 static inline bool
2421 decl_in_symtab_p (const_tree decl)
2422 {
2423 return (TREE_CODE (decl) == FUNCTION_DECL
2424 || (TREE_CODE (decl) == VAR_DECL
2425 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2426 }
2427
2428 inline bool
2429 symtab_node::in_same_comdat_group_p (symtab_node *target)
2430 {
2431 symtab_node *source = this;
2432
2433 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2434 {
2435 if (cn->global.inlined_to)
2436 source = cn->global.inlined_to;
2437 }
2438 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2439 {
2440 if (cn->global.inlined_to)
2441 target = cn->global.inlined_to;
2442 }
2443
2444 return source->get_comdat_group () == target->get_comdat_group ();
2445 }
2446
2447 /* Return node that alias is aliasing. */
2448
2449 inline symtab_node *
2450 symtab_node::get_alias_target (void)
2451 {
2452 ipa_ref *ref = NULL;
2453 iterate_reference (0, ref);
2454 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2455 return ref->referred;
2456 }
2457
2458 /* Return next symbol with a definition after this node. */
2459
2460 inline symtab_node *
2461 symtab_node::next_defined_symbol (void)
2462 {
2463 symtab_node *node1 = next;
2464
2465 for (; node1; node1 = node1->next)
2466 if (node1->definition)
2467 return node1;
2468
2469 return NULL;
2470 }
2471
2472 /* Iterates I-th reference in the list, REF is also set. */
2473
2474 inline ipa_ref *
2475 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2476 {
2477 vec_safe_iterate (ref_list.references, i, &ref);
2478
2479 return ref;
2480 }
2481
2482 /* Iterates I-th referring item in the list, REF is also set. */
2483
2484 inline ipa_ref *
2485 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2486 {
2487 ref_list.referring.iterate (i, &ref);
2488
2489 return ref;
2490 }
2491
2492 /* Iterates I-th referring alias item in the list, REF is also set. */
2493
2494 inline ipa_ref *
2495 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2496 {
2497 ref_list.referring.iterate (i, &ref);
2498
2499 if (ref && ref->use != IPA_REF_ALIAS)
2500 return NULL;
2501
2502 return ref;
2503 }
2504
2505 /* Return true if list contains an alias. */
2506
2507 inline bool
2508 symtab_node::has_aliases_p (void)
2509 {
2510 ipa_ref *ref = NULL;
2511
2512 return (iterate_direct_aliases (0, ref) != NULL);
2513 }
2514
2515 /* Return true when RESOLUTION indicates that the linker will use
2516 the symbol from non-LTO object files. */
2517
2518 inline bool
2519 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2520 {
2521 return (resolution == LDPR_PREVAILING_DEF
2522 || resolution == LDPR_PREEMPTED_REG
2523 || resolution == LDPR_RESOLVED_EXEC
2524 || resolution == LDPR_RESOLVED_DYN);
2525 }
2526
2527 /* Return true when the symtab_node is known to be used from another (non-LTO)
2528 object file. Known only when doing LTO via the linker plugin. */
2529
2530 inline bool
2531 symtab_node::used_from_object_file_p (void)
2532 {
2533 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2534 return false;
2535 if (resolution_used_from_other_file_p (resolution))
2536 return true;
2537 return false;
2538 }
2539
2540 /* Return varpool node for given symbol and check it is a variable. */
2541
2542 inline varpool_node *
2543 varpool_node::get (const_tree decl)
2544 {
2545 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2546 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2547 }
2548
2549 /* Register a symbol NODE. */
2550
2551 inline void
2552 symbol_table::register_symbol (symtab_node *node)
2553 {
2554 node->next = nodes;
2555 node->previous = NULL;
2556
2557 if (nodes)
2558 nodes->previous = node;
2559 nodes = node;
2560
2561 node->order = order++;
2562 }
2563
2564 /* Register a top-level asm statement ASM_STR. */
2565
2566 asm_node *
2567 symbol_table::finalize_toplevel_asm (tree asm_str)
2568 {
2569 asm_node *node;
2570
2571 node = ggc_cleared_alloc<asm_node> ();
2572 node->asm_str = asm_str;
2573 node->order = order++;
2574 node->next = NULL;
2575
2576 if (asmnodes == NULL)
2577 asmnodes = node;
2578 else
2579 asm_last_node->next = node;
2580
2581 asm_last_node = node;
2582 return node;
2583 }
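/* Illustrative sketch: a front end registers a top-level asm statement
   roughly like this (the asm text is only an example):

     const char *text = "nop";
     symtab->finalize_toplevel_asm (build_string (strlen (text), text));  */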
2584
2585 /* Unregister a symbol NODE. */
2586 inline void
2587 symbol_table::unregister (symtab_node *node)
2588 {
2589 if (node->previous)
2590 node->previous->next = node->next;
2591 else
2592 nodes = node->next;
2593
2594 if (node->next)
2595 node->next->previous = node->previous;
2596
2597 node->next = NULL;
2598 node->previous = NULL;
2599 }
2600
2601 /* Release a callgraph NODE and put it into the list of free nodes. */
2602
2603 inline void
2604 symbol_table::release_symbol (cgraph_node *node)
2605 {
2606 cgraph_count--;
2607
2608 /* Clear out the node to NULL all pointers and add the node to the free
2609 list. */
2610 memset (node, 0, sizeof (*node));
2611 node->type = SYMTAB_FUNCTION;
2612 SET_NEXT_FREE_NODE (node, free_nodes);
2613 free_nodes = node;
2614 }
2615
2616 /* Allocate new callgraph node. */
2617
2618 inline cgraph_node *
2619 symbol_table::allocate_cgraph_symbol (void)
2620 {
2621 cgraph_node *node;
2622
2623 if (free_nodes)
2624 {
2625 node = free_nodes;
2626 free_nodes = NEXT_FREE_NODE (node);
2627 }
2628 else
2629 node = ggc_cleared_alloc<cgraph_node> ();
2630
2631 node->m_uid = cgraph_max_uid++;
2632 return node;
2633 }
2634
2635
2636 /* Return first symbol in the symbol table. */
2637 inline symtab_node *
2638 symbol_table::first_symbol (void)
2639 {
2640 return nodes;
2641 }
2642
2643 /* Walk all symbols. */
2644 #define FOR_EACH_SYMBOL(node) \
2645 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2646
2647 /* Return first static symbol with definition. */
2648 inline symtab_node *
2649 symbol_table::first_defined_symbol (void)
2650 {
2651 symtab_node *node;
2652
2653 for (node = nodes; node; node = node->next)
2654 if (node->definition)
2655 return node;
2656
2657 return NULL;
2658 }
2659
2660 /* Walk all symbols with definitions in current unit. */
2661 #define FOR_EACH_DEFINED_SYMBOL(node) \
2662 for ((node) = symtab->first_defined_symbol (); (node); \
2663 (node) = node->next_defined_symbol ())
2664
2665 /* Return first variable. */
2666 inline varpool_node *
2667 symbol_table::first_variable (void)
2668 {
2669 symtab_node *node;
2670 for (node = nodes; node; node = node->next)
2671 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2672 return vnode;
2673 return NULL;
2674 }
2675
2676 /* Return next variable after NODE. */
2677 inline varpool_node *
2678 symbol_table::next_variable (varpool_node *node)
2679 {
2680 symtab_node *node1 = node->next;
2681 for (; node1; node1 = node1->next)
2682 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2683 return vnode1;
2684 return NULL;
2685 }
2686 /* Walk all variables. */
2687 #define FOR_EACH_VARIABLE(node) \
2688 for ((node) = symtab->first_variable (); \
2689 (node); \
2690 (node) = symtab->next_variable ((node)))
2691
2692 /* Return first static variable with initializer. */
2693 inline varpool_node *
2694 symbol_table::first_static_initializer (void)
2695 {
2696 symtab_node *node;
2697 for (node = nodes; node; node = node->next)
2698 {
2699 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2700 if (vnode && DECL_INITIAL (node->decl))
2701 return vnode;
2702 }
2703 return NULL;
2704 }
2705
2706 /* Return next static variable with initializer after NODE. */
2707 inline varpool_node *
2708 symbol_table::next_static_initializer (varpool_node *node)
2709 {
2710 symtab_node *node1 = node->next;
2711 for (; node1; node1 = node1->next)
2712 {
2713 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2714 if (vnode1 && DECL_INITIAL (node1->decl))
2715 return vnode1;
2716 }
2717 return NULL;
2718 }
2719
2720 /* Walk all static variables with initializer set. */
2721 #define FOR_EACH_STATIC_INITIALIZER(node) \
2722 for ((node) = symtab->first_static_initializer (); (node); \
2723 (node) = symtab->next_static_initializer (node))
2724
2725 /* Return first static variable with definition. */
2726 inline varpool_node *
2727 symbol_table::first_defined_variable (void)
2728 {
2729 symtab_node *node;
2730 for (node = nodes; node; node = node->next)
2731 {
2732 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2733 if (vnode && vnode->definition)
2734 return vnode;
2735 }
2736 return NULL;
2737 }
2738
2739 /* Return next static variable with definition after NODE. */
2740 inline varpool_node *
2741 symbol_table::next_defined_variable (varpool_node *node)
2742 {
2743 symtab_node *node1 = node->next;
2744 for (; node1; node1 = node1->next)
2745 {
2746 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2747 if (vnode1 && vnode1->definition)
2748 return vnode1;
2749 }
2750 return NULL;
2751 }
2752 /* Walk all variables with definitions in current unit. */
2753 #define FOR_EACH_DEFINED_VARIABLE(node) \
2754 for ((node) = symtab->first_defined_variable (); (node); \
2755 (node) = symtab->next_defined_variable (node))
2756
2757 /* Return first function with body defined. */
2758 inline cgraph_node *
2759 symbol_table::first_defined_function (void)
2760 {
2761 symtab_node *node;
2762 for (node = nodes; node; node = node->next)
2763 {
2764 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2765 if (cn && cn->definition)
2766 return cn;
2767 }
2768 return NULL;
2769 }
2770
2771 /* Return next function with body defined after NODE. */
2772 inline cgraph_node *
2773 symbol_table::next_defined_function (cgraph_node *node)
2774 {
2775 symtab_node *node1 = node->next;
2776 for (; node1; node1 = node1->next)
2777 {
2778 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2779 if (cn1 && cn1->definition)
2780 return cn1;
2781 }
2782 return NULL;
2783 }
2784
2785 /* Walk all functions with body defined. */
2786 #define FOR_EACH_DEFINED_FUNCTION(node) \
2787 for ((node) = symtab->first_defined_function (); (node); \
2788 (node) = symtab->next_defined_function ((node)))
2789
2790 /* Return first function. */
2791 inline cgraph_node *
2792 symbol_table::first_function (void)
2793 {
2794 symtab_node *node;
2795 for (node = nodes; node; node = node->next)
2796 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2797 return cn;
2798 return NULL;
2799 }
2800
2801 /* Return next function. */
2802 inline cgraph_node *
2803 symbol_table::next_function (cgraph_node *node)
2804 {
2805 symtab_node *node1 = node->next;
2806 for (; node1; node1 = node1->next)
2807 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2808 return cn1;
2809 return NULL;
2810 }
2811
2812 /* Return first function with a gimple body. */
2813 inline cgraph_node *
2814 symbol_table::first_function_with_gimple_body (void)
2815 {
2816 symtab_node *node;
2817 for (node = nodes; node; node = node->next)
2818 {
2819 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2820 if (cn && cn->has_gimple_body_p ())
2821 return cn;
2822 }
2823 return NULL;
2824 }
2825
2826 /* Return next function with a gimple body after NODE. */
2827 inline cgraph_node *
2828 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2829 {
2830 symtab_node *node1 = node->next;
2831 for (; node1; node1 = node1->next)
2832 {
2833 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2834 if (cn1 && cn1->has_gimple_body_p ())
2835 return cn1;
2836 }
2837 return NULL;
2838 }
2839
2840 /* Walk all functions. */
2841 #define FOR_EACH_FUNCTION(node) \
2842 for ((node) = symtab->first_function (); (node); \
2843 (node) = symtab->next_function ((node)))
2844
2845 /* Return true when callgraph node is a function with a Gimple body defined
2846 in the current unit. Functions can also be defined externally or they
2847 can be thunks with no Gimple representation.
2848
2849 Note that at WPA stage, the function body may not be present in memory. */
2850
2851 inline bool
2852 cgraph_node::has_gimple_body_p (void)
2853 {
2854 return definition && !thunk.thunk_p && !alias;
2855 }
2856
2857 /* Walk all functions with body defined. */
2858 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2859 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2860 (node) = symtab->next_function_with_gimple_body (node))
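/* Illustrative sketch: the traversal macros above are used as ordinary
   for-loops, e.g.:

     cgraph_node *fn;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (fn)
       fn->dump (stderr);

     varpool_node *var;
     FOR_EACH_DEFINED_VARIABLE (var)
       var->dump (stderr);  */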
2861
2862 /* Uniquize all constants that appear in memory.
2863 Each constant in memory thus far output is recorded
2864 in `const_desc_table'. */
2865
2866 struct GTY((for_user)) constant_descriptor_tree {
2867 /* A MEM for the constant. */
2868 rtx rtl;
2869
2870 /* The value of the constant. */
2871 tree value;
2872
2873 /* Hash of value. Computing the hash from value each time
2874 hashfn is called can't work properly, as that means recursive
2875 use of the hash table during hash table expansion. */
2876 hashval_t hash;
2877 };
2878
2879 /* Return true when the function is only called directly or it has an alias,
2880 i.e. it is not externally visible, its address was not taken and
2881 it is not used in any other non-standard way. */
2882
2883 inline bool
2884 cgraph_node::only_called_directly_or_aliased_p (void)
2885 {
2886 gcc_assert (!global.inlined_to);
2887 return (!force_output && !address_taken
2888 && !ifunc_resolver
2889 && !used_from_other_partition
2890 && !DECL_VIRTUAL_P (decl)
2891 && !DECL_STATIC_CONSTRUCTOR (decl)
2892 && !DECL_STATIC_DESTRUCTOR (decl)
2893 && !used_from_object_file_p ()
2894 && !externally_visible);
2895 }
2896
2897 /* Return true when function can be removed from callgraph
2898 if all direct calls are eliminated. */
2899
2900 inline bool
2901 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2902 {
2903 gcc_checking_assert (!global.inlined_to);
2904 /* Extern inlines can always go, we will use the external definition. */
2905 if (DECL_EXTERNAL (decl))
2906 return true;
2907 /* When the function is needed, we cannot remove it. */
2908 if (force_output || used_from_other_partition)
2909 return false;
2910 if (DECL_STATIC_CONSTRUCTOR (decl)
2911 || DECL_STATIC_DESTRUCTOR (decl))
2912 return false;
2913 /* Only COMDAT functions can be removed if externally visible. */
2914 if (externally_visible
2915 && (!DECL_COMDAT (decl)
2916 || forced_by_abi
2917 || used_from_object_file_p ()))
2918 return false;
2919 return true;
2920 }
2921
2922 /* Verify cgraph, if consistency checking is enabled. */
2923
2924 inline void
2925 cgraph_node::checking_verify_cgraph_nodes (void)
2926 {
2927 if (flag_checking)
2928 cgraph_node::verify_cgraph_nodes ();
2929 }
2930
2931 /* Return true when variable can be removed from variable pool
2932 if all direct references are eliminated. */
2933
2934 inline bool
2935 varpool_node::can_remove_if_no_refs_p (void)
2936 {
2937 if (DECL_EXTERNAL (decl))
2938 return true;
2939 return (!force_output && !used_from_other_partition
2940 && ((DECL_COMDAT (decl)
2941 && !forced_by_abi
2942 && !used_from_object_file_p ())
2943 || !externally_visible
2944 || DECL_HAS_VALUE_EXPR_P (decl)));
2945 }
2946
2947 /* Return true when all references to variable must be visible in ipa_ref_list.
2948 i.e. if the variable is not externally visible or not used in some magic
2949 way (asm statement or such).
2950 The magic uses are all summarized in force_output flag. */
2951
2952 inline bool
2953 varpool_node::all_refs_explicit_p ()
2954 {
2955 return (definition
2956 && !externally_visible
2957 && !used_from_other_partition
2958 && !force_output);
2959 }
2960
2961 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2962 {
2963 static hashval_t hash (constant_descriptor_tree *);
2964 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2965 };
2966
2967 /* Constant pool accessor function. */
2968 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2969
2970 /* Return node that alias is aliasing. */
2971
2972 inline cgraph_node *
2973 cgraph_node::get_alias_target (void)
2974 {
2975 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2976 }
2977
2978 /* Return node that alias is aliasing. */
2979
2980 inline varpool_node *
2981 varpool_node::get_alias_target (void)
2982 {
2983 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2984 }
2985
2986 /* Walk the alias chain to return the symbol NODE is alias of.
2987 If NODE is not an alias, return NODE.
2988 When AVAILABILITY is non-NULL, get minimal availability in the chain.
2989 When REF is non-NULL, assume that reference happens in symbol REF
2990 when determining the availability. */
2991
2992 inline symtab_node *
2993 symtab_node::ultimate_alias_target (enum availability *availability,
2994 symtab_node *ref)
2995 {
2996 if (!alias)
2997 {
2998 if (availability)
2999 *availability = get_availability (ref);
3000 return this;
3001 }
3002
3003 return ultimate_alias_target_1 (availability, ref);
3004 }
3005
3006 /* Given a function symbol, walk the alias chain to return the function the
3007 node is an alias of. Do not walk through thunks.
3008 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3009 When REF is non-NULL, assume that reference happens in symbol REF
3010 when determining the availability. */
3011
3012 inline cgraph_node *
3013 cgraph_node::ultimate_alias_target (enum availability *availability,
3014 symtab_node *ref)
3015 {
3016 cgraph_node *n = dyn_cast <cgraph_node *>
3017 (symtab_node::ultimate_alias_target (availability, ref));
3018 if (!n && availability)
3019 *availability = AVAIL_NOT_AVAILABLE;
3020 return n;
3021 }
3022
3023 /* For the given variable pool node, walk the alias chain to return the
3024 variable it is an alias of. Do not walk through thunks.
3025 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3026 When REF is non-NULL, assume that reference happens in symbol REF
3027 when determining the availability. */
3028
3029 inline varpool_node *
3030 varpool_node::ultimate_alias_target (availability *availability,
3031 symtab_node *ref)
3032 {
3033 varpool_node *n = dyn_cast <varpool_node *>
3034 (symtab_node::ultimate_alias_target (availability, ref));
3035
3036 if (!n && availability)
3037 *availability = AVAIL_NOT_AVAILABLE;
3038 return n;
3039 }
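/* Illustrative sketch: a caller that wants to look through aliases and only
   rely on a body it is actually allowed to use might write (analyze_body is a
   hypothetical helper):

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (target && avail >= AVAIL_AVAILABLE)
       analyze_body (target);  */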
3040
3041 /* Set callee N of call graph edge and add it to the corresponding set of
3042 callers. */
3043
3044 inline void
3045 cgraph_edge::set_callee (cgraph_node *n)
3046 {
3047 prev_caller = NULL;
3048 if (n->callers)
3049 n->callers->prev_caller = this;
3050 next_caller = n->callers;
3051 n->callers = this;
3052 callee = n;
3053 }
3054
3055 /* Redirect the callee of the edge to N. The function does not update the
3056 underlying call expression. */
3057
3058 inline void
3059 cgraph_edge::redirect_callee (cgraph_node *n)
3060 {
3061 /* Remove from callers list of the current callee. */
3062 remove_callee ();
3063
3064 /* Insert to callers list of the new callee. */
3065 set_callee (n);
3066 }
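
/* A minimal usage sketch: a pass that has resolved the real target of a
   call can retarget the edge through the alias chain; E below is a
   hypothetical cgraph_edge pointer.

     cgraph_node *target = e->callee->ultimate_alias_target ();
     if (target != e->callee)
       e->redirect_callee (target);

   As noted above, the underlying call statement is not rewritten by this;
   updating it is a separate step.  */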
3067
3068 /* Return true when the edge represents a direct recursion. */
3069
3070 inline bool
3071 cgraph_edge::recursive_p (void)
3072 {
3073 cgraph_node *c = callee->ultimate_alias_target ();
3074 if (caller->global.inlined_to)
3075 return caller->global.inlined_to->decl == c->decl;
3076 else
3077 return caller->decl == c->decl;
3078 }
3079
3080 /* Remove the edge from the list of the callers of the callee. */
3081
3082 inline void
3083 cgraph_edge::remove_callee (void)
3084 {
3085 gcc_assert (!indirect_unknown_callee);
3086 if (prev_caller)
3087 prev_caller->next_caller = next_caller;
3088 if (next_caller)
3089 next_caller->prev_caller = prev_caller;
3090 if (!prev_caller)
3091 callee->callers = next_caller;
3092 }
3093
3094 /* Return true if call must bind to current definition. */
3095
3096 inline bool
3097 cgraph_edge::binds_to_current_def_p ()
3098 {
3099 if (callee)
3100 return callee->binds_to_current_def_p (caller);
3101 else
3102 return false;
3103 }
3104
3105 /* Expected frequency of executions within the function.
3106 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
3107 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
3108
3109 inline int
3110 cgraph_edge::frequency ()
3111 {
3112 return count.to_cgraph_frequency (caller->global.inlined_to
3113 ? caller->global.inlined_to->count
3114 : caller->count);
3115 }
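
/* A minimal usage sketch: because CGRAPH_FREQ_BASE means "expected once per
   invocation of the caller", a simple hotness test on a hypothetical edge E
   looks like

     if (e->frequency () >= CGRAPH_FREQ_BASE)
       heavy_optimization (e);

   where heavy_optimization stands for any pass-specific handling of calls
   that are not dynamically cold.  */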
3116
3117
3118 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
3119 static inline bool
3120 decl_is_tm_clone (const_tree fndecl)
3121 {
3122 cgraph_node *n = cgraph_node::get (fndecl);
3123 if (n)
3124 return n->tm_clone;
3125 return false;
3126 }
3127
3128 /* Likewise indicate that a node is needed, i.e. reachable via some
3129 external means. */
3130
3131 inline void
3132 cgraph_node::mark_force_output (void)
3133 {
3134 force_output = 1;
3135 gcc_checking_assert (!global.inlined_to);
3136 }
3137
3138 /* Return true if function should be optimized for size. */
3139
3140 inline bool
3141 cgraph_node::optimize_for_size_p (void)
3142 {
3143 if (opt_for_fn (decl, optimize_size))
3144 return true;
3145 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
3146 return true;
3147 else
3148 return false;
3149 }
3150
3151 /* Return symtab_node for NODE or create one if it is not present
3152 in symtab. */
3153
3154 inline symtab_node *
3155 symtab_node::get_create (tree node)
3156 {
3157 if (TREE_CODE (node) == VAR_DECL)
3158 return varpool_node::get_create (node);
3159 else
3160 return cgraph_node::get_create (node);
3161 }
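
/* A minimal usage sketch: get_create dispatches on the kind of declaration,
   so one call handles both variables and functions; DECL below is a
   hypothetical tree.

     symtab_node *snode = symtab_node::get_create (decl);
     if (cgraph_node *cnode = dyn_cast <cgraph_node *> (snode))
       use_function_node (cnode);
     else if (varpool_node *vnode = dyn_cast <varpool_node *> (snode))
       use_variable_node (vnode);

   use_function_node and use_variable_node stand for whatever the caller
   wants to do with the resulting node.  */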
3162
3163 /* Return availability of NODE when referenced from REF. */
3164
3165 inline enum availability
3166 symtab_node::get_availability (symtab_node *ref)
3167 {
3168 if (is_a <cgraph_node *> (this))
3169 return dyn_cast <cgraph_node *> (this)->get_availability (ref);
3170 else
3171 return dyn_cast <varpool_node *> (this)->get_availability (ref);
3172 }
3173
3174 /* Call CALLBACK on the symtab node and the aliases associated with the node.
3175 When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
3176
3177 inline bool
3178 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
3179 void *),
3180 void *data,
3181 bool include_overwritable)
3182 {
3183 if (include_overwritable
3184 || get_availability () > AVAIL_INTERPOSABLE)
3185 {
3186 if (callback (this, data))
3187 return true;
3188 }
3189 if (has_aliases_p ())
3190 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3191 return false;
3192 }
3193
3194 /* Call CALLBACK on the function and the aliases associated with it.
3195 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3196 skipped. */
3197
3198 inline bool
3199 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
3200 void *),
3201 void *data,
3202 bool include_overwritable)
3203 {
3204 if (include_overwritable
3205 || get_availability () > AVAIL_INTERPOSABLE)
3206 {
3207 if (callback (this, data))
3208 return true;
3209 }
3210 if (has_aliases_p ())
3211 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3212 return false;
3213 }
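
/* A minimal usage sketch: the callback is invoked on the node and then on
   each alias, and may stop the walk early by returning true.  The callback
   and the DATA payload below are hypothetical.

     static bool
     count_node (cgraph_node *node, void *data)
     {
       (*(int *) data)++;
       return false;
     }

     int n = 0;
     some_node->call_for_symbol_and_aliases (count_node, &n, true);

   After the call, N holds the number of nodes visited: the node itself plus
   its aliases, including interposable ones since the last argument is
   true.  */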
3214
3215 /* Call CALLBACK on the varpool symbol and the aliases associated with it.
3216 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3217 skipped. */
3218
3219 inline bool
3220 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3221 void *),
3222 void *data,
3223 bool include_overwritable)
3224 {
3225 if (include_overwritable
3226 || get_availability () > AVAIL_INTERPOSABLE)
3227 {
3228 if (callback (this, data))
3229 return true;
3230 }
3231 if (has_aliases_p ())
3232 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3233 return false;
3234 }
3235
3236 /* Return true if the reference may be used in an address compare. */
3237
3238 inline bool
3239 ipa_ref::address_matters_p ()
3240 {
3241 if (use != IPA_REF_ADDR)
3242 return false;
3243 /* Addresses taken from virtual tables are never compared. */
3244 if (is_a <varpool_node *> (referring)
3245 && DECL_VIRTUAL_P (referring->decl))
3246 return false;
3247 return referred->address_can_be_compared_p ();
3248 }
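
/* A minimal usage sketch: a pass deciding whether a symbol's address is ever
   meaningfully compared can combine this predicate with symtab_node's
   iterate_referring iterator; NODE and the loop locals are hypothetical.

     ipa_ref *ref;
     bool address_compared = false;
     for (unsigned i = 0; node->iterate_referring (i, ref); i++)
       if (ref->address_matters_p ())
	 address_compared = true;

   If ADDRESS_COMPARED stays false, the symbol's address can be replaced
   without changing observable address comparisons.  */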
3249
3250 /* Build polymorphic call context for indirect call E. */
3251
3252 inline
3253 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3254 {
3255 gcc_checking_assert (e->indirect_info->polymorphic);
3256 *this = e->indirect_info->context;
3257 }
3258
3259 /* Build empty "I know nothing" context. */
3260
3261 inline
3262 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3263 {
3264 clear_speculation ();
3265 clear_outer_type ();
3266 invalid = false;
3267 }
3268
3269 /* Make context non-speculative. */
3270
3271 inline void
3272 ipa_polymorphic_call_context::clear_speculation ()
3273 {
3274 speculative_outer_type = NULL;
3275 speculative_offset = 0;
3276 speculative_maybe_derived_type = false;
3277 }
3278
3279 /* Produce a context specifying all derived types of OTR_TYPE. If OTR_TYPE
3280 is NULL, the context is set to the dummy "I know nothing" setting. */
3281
3282 inline void
3283 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3284 {
3285 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3286 offset = 0;
3287 maybe_derived_type = true;
3288 maybe_in_construction = true;
3289 dynamic = true;
3290 }
3291
3292 /* Adjust all offsets in the context by OFF bits. */
3293
3294 inline void
3295 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3296 {
3297 if (outer_type)
3298 offset += off;
3299 if (speculative_outer_type)
3300 speculative_offset += off;
3301 }
3302
3303 /* Return TRUE if context is fully useless. */
3304
3305 inline bool
3306 ipa_polymorphic_call_context::useless_p () const
3307 {
3308 return (!outer_type && !speculative_outer_type);
3309 }
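
/* A minimal usage sketch: devirtualization-style code typically builds the
   context directly from a polymorphic indirect edge and gives up when the
   context carries no information; E below is a hypothetical edge already
   known to satisfy e->indirect_info->polymorphic.

     ipa_polymorphic_call_context ctx (e);
     if (ctx.useless_p ())
       return;

   Otherwise outer_type (and possibly speculative_outer_type) constrain the
   set of possible targets of the call.  */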
3310
3311 /* When using fprintf (or similar), problems can arise with
3312 transient generated strings. Many string-generation APIs
3313 only support one result being alive at once (e.g. by
3314 returning a pointer to a statically-allocated buffer).
3315
3316 If there is more than one generated string within one
3317 fprintf call, the first string gets evicted or overwritten
3318 by the second before fprintf is fully evaluated.
3319 See e.g. PR/53136.
3320
3321 This function provides a workaround for this, by providing
3322 a simple way to create copies of these transient strings,
3323 without the need to have explicit cleanup:
3324
3325 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3326 xstrdup_for_dump (EXPR_1),
3327 xstrdup_for_dump (EXPR_2));
3328
3329 This is actually a simple wrapper around ggc_strdup, but
3330 the name documents the intent. We require that no GC can occur
3331 within the fprintf call. */
3332
3333 static inline const char *
3334 xstrdup_for_dump (const char *transient_str)
3335 {
3336 return ggc_strdup (transient_str);
3337 }
3338
3339 #endif /* GCC_CGRAPH_H */