[gcc.git] / gcc / cgraph.h
1 /* Callgraph handling code.
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "ipa-ref.h"
25 #include "plugin-api.h"
26
27 class ipa_opt_pass_d;
28 typedef ipa_opt_pass_d *ipa_opt_pass;
29
30 /* Symbol table consists of functions and variables.
31 TODO: add labels and CONST_DECLs. */
32 enum symtab_type
33 {
34 SYMTAB_SYMBOL,
35 SYMTAB_FUNCTION,
36 SYMTAB_VARIABLE
37 };
38
39 /* Section names are stored as reference counted strings in GGC safe hashtable
40 (to make them survive through PCH). */
41
42 struct GTY((for_user)) section_hash_entry_d
43 {
44 int ref_count;
45 char *name; /* As long as this data structure stays in GGC, we cannot put
46 the string at the tail of the structure or GGC dies in a horrible
47 way. */
48 };
49
50 typedef struct section_hash_entry_d section_hash_entry;
51
52 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
53 {
54 typedef const char *compare_type;
55
56 static hashval_t hash (section_hash_entry *);
57 static bool equal (section_hash_entry *, const char *);
58 };
59
60 enum availability
61 {
62 /* Not yet set by cgraph_function_body_availability. */
63 AVAIL_UNSET,
64 /* Function body/variable initializer is unknown. */
65 AVAIL_NOT_AVAILABLE,
66 /* Function body/variable initializer is known but might be replaced
67 by a different one from another compilation unit and thus needs to
68 be dealt with care. Like AVAIL_NOT_AVAILABLE it can have
69 arbitrary side effects on escaping variables and functions, while
70 like AVAILABLE it might access static variables. */
71 AVAIL_INTERPOSABLE,
72 /* Function body/variable initializer is known and will be used in final
73 program. */
74 AVAIL_AVAILABLE,
75 /* Function body/variable initializer is known and all its uses are
76 explicitly visible within the current unit (i.e. its address is never taken
77 and it is not exported to other units). Currently used only for functions. */
78 AVAIL_LOCAL
79 };
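/* Illustrative sketch (not part of this header): availability is typically
   obtained together with the ultimate alias target and then compared against
   the enumerators above.  The callee name below is hypothetical.

     enum availability avail;
     symtab_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       optimize_using_known_body (target);   // hypothetical helper
 */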
80
81 /* Classification of symbols WRT partitioning. */
82 enum symbol_partitioning_class
83 {
84 /* External declarations are ignored by partitioning algorithms and they are
85 added into the boundary later via compute_ltrans_boundary. */
86 SYMBOL_EXTERNAL,
87 /* Partitioned symbols are put into one of the partitions. */
88 SYMBOL_PARTITION,
89 /* Duplicated symbols (such as comdat or constant pool references) are
90 copied into every node needing them via add_symbol_to_partition. */
91 SYMBOL_DUPLICATE
92 };
93
94 /* Base of all entries in the symbol table.
95 The symtab_node is inherited by cgraph and varpool nodes. */
96 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
97 chain_next ("%h.next"), chain_prev ("%h.previous")))
98 symtab_node
99 {
100 public:
101 /* Return name. */
102 const char *name () const;
103
104 /* Return asm name. */
105 const char * asm_name () const;
106
107 /* Add node into symbol table. This function is not used directly, but via
108 cgraph/varpool node creation routines. */
109 void register_symbol (void);
110
111 /* Remove symbol from symbol table. */
112 void remove (void);
113
114 /* Dump symtab node to F. */
115 void dump (FILE *f);
116
117 /* Dump symtab node to stderr. */
118 void DEBUG_FUNCTION debug (void);
119
120 /* Verify consistency of node. */
121 void DEBUG_FUNCTION verify (void);
122
123 /* Return ipa reference from this symtab_node to
124 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
125 of the use and STMT the statement (if it exists). */
126 ipa_ref *create_reference (symtab_node *referred_node,
127 enum ipa_ref_use use_type);
128
129 /* Return ipa reference from this symtab_node to
130 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
131 of the use and STMT the statement (if it exists). */
132 ipa_ref *create_reference (symtab_node *referred_node,
133 enum ipa_ref_use use_type, gimple stmt);
134
135 /* If VAL is a reference to a function or a variable, add a reference from
136 this symtab_node to the corresponding symbol table node. USE_TYPE specifies
137 the type of the use and STMT the statement (if it exists). Return the new
138 reference or NULL if none was created. */
139 ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
140 gimple stmt);
141
142 /* Clone all references from symtab NODE to this symtab_node. */
143 void clone_references (symtab_node *node);
144
145 /* Clone all referring items from symtab NODE to this symtab_node. */
149 void clone_referring (symtab_node *node);
150
151 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
152 ipa_ref *clone_reference (ipa_ref *ref, gimple stmt);
153
154 /* Find the structure describing a reference to REFERRED_NODE
155 and associated with statement STMT. */
156 ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
157 unsigned int lto_stmt_uid);
158
159 /* Remove all references that are associated with statement STMT. */
160 void remove_stmt_references (gimple stmt);
161
162 /* Remove all stmt references in non-speculative references.
163 Those are not maintained during inlining & cloning.
164 The exceptions are speculative references that are updated along
165 with callgraph edges associated with them. */
166 void clear_stmts_in_references (void);
167
168 /* Remove all references in ref list. */
169 void remove_all_references (void);
170
171 /* Remove all referring items in ref list. */
172 void remove_all_referring (void);
173
174 /* Dump references in ref list to FILE. */
175 void dump_references (FILE *file);
176
177 /* Dump referring in list to FILE. */
178 void dump_referring (FILE *);
179
180 /* Get number of references for this node. */
181 inline unsigned num_references (void)
182 {
183 return ref_list.references ? ref_list.references->length () : 0;
184 }
185
186 /* Return the I-th reference in the list; REF is also set. */
187 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
188
189 /* Return the I-th referring item in the list; REF is also set. */
190 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
191
192 /* Return the I-th referring alias item in the list; REF is also set. */
193 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
194
195 /* Return true if symtab node and TARGET represent
196 semantically equivalent symbols. */
197 bool semantically_equivalent_p (symtab_node *target);
198
199 /* Classify symbol symtab node for partitioning. */
200 enum symbol_partitioning_class get_partitioning_class (void);
201
202 /* Return comdat group. */
203 tree get_comdat_group ()
204 {
205 return x_comdat_group;
206 }
207
208 /* Return comdat group as identifier_node. */
209 tree get_comdat_group_id ()
210 {
211 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
212 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
213 return x_comdat_group;
214 }
215
216 /* Set comdat group. */
217 void set_comdat_group (tree group)
218 {
219 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
220 || DECL_P (group));
221 x_comdat_group = group;
222 }
223
224 /* Return section as string. */
225 const char * get_section ()
226 {
227 if (!x_section)
228 return NULL;
229 return x_section->name;
230 }
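/* Illustrative sketch (not part of this header): reading and changing the
   section of a symbol and its aliases; the section name below is made up.

     if (node->get_section () == NULL)
       node->set_section ("my_hot_section");
 */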
231
232 /* Remove node from same comdat group. */
233 void remove_from_same_comdat_group (void);
234
235 /* Add this symtab_node to the same comdat group that OLD is in. */
236 void add_to_same_comdat_group (symtab_node *old_node);
237
238 /* Dissolve the same_comdat_group list in which NODE resides. */
239 void dissolve_same_comdat_group_list (void);
240
241 /* Return true when symtab_node is known to be used from other (non-LTO)
242 object file. Known only when doing LTO via linker plugin. */
243 bool used_from_object_file_p (void);
244
245 /* Walk the alias chain to return the symbol NODE is alias of.
246 If NODE is not an alias, return NODE.
247 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
248 symtab_node *ultimate_alias_target (enum availability *avail = NULL);
249
250 /* Return next reachable static symbol with initializer after NODE. */
251 inline symtab_node *next_defined_symbol (void);
252
253 /* Add reference recording that symtab node is alias of TARGET.
254 The function can fail in the case of aliasing cycles; in this case
255 it returns false. */
256 bool resolve_alias (symtab_node *target);
257
258 /* The C++ FE sometimes changes linkage flags after producing same
259 body aliases. */
260 void fixup_same_cpp_alias_visibility (symtab_node *target);
261
262 /* Call callback on symtab node and aliases associated to this node.
263 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
264 skipped. */
265 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
266 void *data,
267 bool include_overwrite);
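/* Illustrative sketch (not part of this header): a worker callback passed to
   call_for_symbol_and_aliases; the walk is assumed to stop and return true
   as soon as the callback returns true.

     static bool
     mark_address_taken_cb (symtab_node *n, void *data ATTRIBUTE_UNUSED)
     {
       n->address_taken = true;
       return false;   // keep walking the aliases
     }

     node->call_for_symbol_and_aliases (mark_address_taken_cb, NULL, true);
 */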
268
269 /* If this node cannot be interposed by the static or dynamic linker to point
270 to a different definition, return this symbol. Otherwise look for an alias
271 with such a property and, if none exists, introduce a new one. */
272 symtab_node *noninterposable_alias (void);
273
274 /* Return node that alias is aliasing. */
275 inline symtab_node *get_alias_target (void);
276
277 /* Set section for symbol and its aliases. */
278 void set_section (const char *section);
279
280 /* Set section, do not recurse into aliases.
281 When one wants to change section of symbol and its aliases,
282 use set_section. */
283 void set_section_for_node (const char *section);
284
285 /* Set initialization priority to PRIORITY. */
286 void set_init_priority (priority_type priority);
287
288 /* Return the initialization priority. */
289 priority_type get_init_priority ();
290
291 /* Return availability of NODE. */
292 enum availability get_availability (void);
293
294 /* Make DECL local. */
295 void make_decl_local (void);
296
297 /* Return desired alignment of the definition. This is NOT the alignment
298 useful for accessing THIS, because THIS may be interposable and DECL_ALIGN
299 should be used instead. It must however be guaranteed when outputting
300 the definition of THIS. */
301 unsigned int definition_alignment ();
302
303 /* Return true if alignment can be increased. */
304 bool can_increase_alignment_p ();
305
306 /* Increase alignment of symbol to ALIGN. */
307 void increase_alignment (unsigned int align);
308
309 /* Return true if list contains an alias. */
310 bool has_aliases_p (void);
311
312 /* Return true when the symbol is a real symbol, i.e. it is not an inline
313 clone or an abstract function kept for debug info purposes only. */
314 bool real_symbol_p (void);
315
316 /* Determine if the symbol declaration is needed. That is, it is visible
317 either to something outside this translation unit or to something magic
318 in the system configury. This function is used just during symbol creation. */
319 bool needed_p (void);
320
321 /* Return true when there are references to the node. */
322 bool referred_to_p (bool include_self = true);
323
324 /* Return true if NODE can be discarded by linker from the binary. */
325 inline bool
326 can_be_discarded_p (void)
327 {
328 return (DECL_EXTERNAL (decl)
329 || (get_comdat_group ()
330 && resolution != LDPR_PREVAILING_DEF
331 && resolution != LDPR_PREVAILING_DEF_IRONLY
332 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
333 }
334
335 /* Return true if NODE is local to a particular COMDAT group, and must not
336 be named from outside the COMDAT. This is used for C++ decloned
337 constructors. */
338 inline bool comdat_local_p (void)
339 {
340 return (same_comdat_group && !TREE_PUBLIC (decl));
341 }
342
343 /* Return true if ONE and TWO are part of the same COMDAT group. */
344 inline bool in_same_comdat_group_p (symtab_node *target);
345
346 /* Return true if symbol is known to be nonzero. */
347 bool nonzero_address ();
348
349 /* Return 0 if symbol is known to have different address than S2,
350 Return 1 if symbol is known to have same address as S2,
351 return 2 otherwise. */
352 int equal_address_to (symtab_node *s2);
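/* Illustrative sketch (not part of this header): interpreting the tri-state
   result of equal_address_to when folding an address comparison; fold_to is
   a hypothetical helper.

     int cmp = s1->equal_address_to (s2);
     if (cmp == 0)        // addresses known to differ
       fold_to (boolean_false_node);
     else if (cmp == 1)   // addresses known to be equal
       fold_to (boolean_true_node);
     // cmp == 2: unknown, leave the comparison alone
 */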
353
354 /* Return true if symbol's address may possibly be compared to other
355 symbol's address. */
356 bool address_matters_p ();
357
358 /* Return true if NODE's address can be compared. This uses properties
359 of NODE only and does not check whether the address is actually taken in
360 an interesting way. For that use ADDRESS_MATTERS_P instead. */
361 bool address_can_be_compared_p (void);
362
363 /* Return symbol table node associated with DECL, if any,
364 and NULL otherwise. */
365 static inline symtab_node *get (const_tree decl)
366 {
367 #ifdef ENABLE_CHECKING
368 /* Check that we are called for sane type of object - functions
369 and static or external variables. */
370 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
371 || (TREE_CODE (decl) == VAR_DECL
372 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
373 || in_lto_p)));
374 /* Check that the mapping is sane - perhaps this check can go away,
375 but at the moment frontends tend to corrupt the mapping by calling
376 memcpy/memset on the tree nodes. */
377 gcc_checking_assert (!decl->decl_with_vis.symtab_node
378 || decl->decl_with_vis.symtab_node->decl == decl);
379 #endif
380 return decl->decl_with_vis.symtab_node;
381 }
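/* Illustrative sketch (not part of this header): the usual lookup pattern;
   get may return NULL for decls that never entered the symbol table.
   note_defined_symbol is a hypothetical callee.

     symtab_node *snode = symtab_node::get (decl);
     if (snode && snode->definition)
       note_defined_symbol (snode);
 */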
382
383 /* Try to find a symtab node for declaration DECL and if it does not
384 exist or if it corresponds to an inline clone, create a new one. */
385 static inline symtab_node * get_create (tree node);
386
387 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
388 Return NULL if there's no such node. */
389 static symtab_node *get_for_asmname (const_tree asmname);
390
391 /* Dump symbol table to F. */
392 static void dump_table (FILE *);
393
394 /* Dump symbol table to stderr. */
395 static inline DEBUG_FUNCTION void debug_symtab (void)
396 {
397 dump_table (stderr);
398 }
399
400 /* Verify symbol table for internal consistency. */
401 static DEBUG_FUNCTION void verify_symtab_nodes (void);
402
403 /* Type of the symbol. */
404 ENUM_BITFIELD (symtab_type) type : 8;
405
406 /* The symbols resolution. */
407 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
408
409 /*** Flags representing the symbol type. ***/
410
411 /* True when symbol corresponds to a definition in current unit.
412 Set via finalize_function or finalize_decl. */
413 unsigned definition : 1;
414 /* True when symbol is an alias.
415 Set by assemble_alias. */
416 unsigned alias : 1;
417 /* True when alias is a weakref. */
418 unsigned weakref : 1;
419 /* The C++ frontend produces same body aliases and extra name aliases for
420 virtual functions and vtables that are obviously equivalent.
421 Those aliases are a bit special, especially because the C++ frontend
422 visibility code is so ugly it cannot get them right the first time
423 and their visibility needs to be copied from their "masters" at
424 the end of parsing. */
425 unsigned cpp_implicit_alias : 1;
426 /* Set once the definition was analyzed. The list of references and
427 other properties are built during analysis. */
428 unsigned analyzed : 1;
429 /* Set for write-only variables. */
430 unsigned writeonly : 1;
431 /* Visibility of symbol was used for further optimization; do not
432 permit further changes. */
433 unsigned refuse_visibility_changes : 1;
434
435 /*** Visibility and linkage flags. ***/
436
437 /* Set when function is visible by other units. */
438 unsigned externally_visible : 1;
439 /* Don't reorder to other symbols having this set. */
440 unsigned no_reorder : 1;
441 /* The symbol will be assumed to be used in an invisible way (like
442 by a toplevel asm statement). */
443 unsigned force_output : 1;
444 /* Like FORCE_OUTPUT, but in this case it is the ABI that requires the symbol
445 to be exported. Unlike FORCE_OUTPUT this flag gets cleared for symbols
446 promoted to static, and it does not inhibit optimization. */
447 unsigned forced_by_abi : 1;
448 /* True when the name is known to be unique and thus it does not need mangling. */
449 unsigned unique_name : 1;
450 /* Specifies whether the section was set by the user or by the
451 compiler via -ffunction-sections. */
452 unsigned implicit_section : 1;
453 /* True when body and other characteristics have been removed by
454 symtab_remove_unreachable_nodes. */
455 unsigned body_removed : 1;
456
457 /*** WHOPR Partitioning flags.
458 These flags are used at ltrans stage when only part of the callgraph is
459 available. ***/
460
461 /* Set when variable is used from other LTRANS partition. */
462 unsigned used_from_other_partition : 1;
463 /* Set when function is available in the other LTRANS partition.
464 During WPA output it is used to mark nodes that are present in
465 multiple partitions. */
466 unsigned in_other_partition : 1;
467
468
469
470 /*** other flags. ***/
471
472 /* Set when symbol has address taken. */
473 unsigned address_taken : 1;
474 /* Set when init priority is set. */
475 unsigned in_init_priority_hash : 1;
476
477 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
478 of offloading, for separate compilation for a different target. */
479 unsigned need_lto_streaming : 1;
480
481 /* Set when symbol can be streamed into bytecode for offloading. */
482 unsigned offloadable : 1;
483
484
485 /* Ordering of all symtab entries. */
486 int order;
487
488 /* Declaration representing the symbol. */
489 tree decl;
490
491 /* Linked list of symbol table entries starting with symtab_nodes. */
492 symtab_node *next;
493 symtab_node *previous;
494
495 /* Linked list of symbols with the same asm name. There may be multiple
496 entries for a single symbol name during LTO, because symbols are renamed
497 only after partitioning.
498
499 Because inline clones are kept in the assembler name hash, they also produce
500 duplicate entries.
501
502 There are also several long-standing bugs where frontends and builtin
503 code produce duplicated decls. */
504 symtab_node *next_sharing_asm_name;
505 symtab_node *previous_sharing_asm_name;
506
507 /* Circular list of nodes in the same comdat group if non-NULL. */
508 symtab_node *same_comdat_group;
509
510 /* Vectors of referring and referenced entities. */
511 ipa_ref_list ref_list;
512
513 /* Alias target. May be either a DECL pointer or an ASSEMBLER_NAME pointer
514 depending on what was known to the frontend at creation time.
515 Once the alias is resolved, this pointer becomes NULL. */
516 tree alias_target;
517
518 /* File stream where this node is being written to. */
519 struct lto_file_decl_data * lto_file_data;
520
521 PTR GTY ((skip)) aux;
522
523 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
524 tree x_comdat_group;
525
526 /* Section name. Again can be private, if allowed. */
527 section_hash_entry *x_section;
528
529 protected:
530 /* Dump base fields of symtab nodes to F. Not to be used directly. */
531 void dump_base (FILE *);
532
533 /* Verify common part of symtab node. */
534 bool DEBUG_FUNCTION verify_base (void);
535
536 /* Remove node from symbol table. This function is not used directly, but via
537 cgraph/varpool node removal routines. */
538 void unregister (void);
539
540 /* Return the initialization and finalization priority information for
541 DECL. If there is no previous priority information, a freshly
542 allocated structure is returned. */
543 struct symbol_priority_map *priority_info (void);
544
545 /* Worker for call_for_symbol_and_aliases. */
546 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
547 void *data,
548 bool include_overwrite);
549 private:
550 /* Worker for set_section. */
551 static bool set_section (symtab_node *n, void *s);
552
553 /* Worker for symtab_resolve_alias. */
554 static bool set_implicit_section (symtab_node *n, void *);
555
556 /* Worker searching noninterposable alias. */
557 static bool noninterposable_alias (symtab_node *node, void *data);
558
559 /* Worker for ultimate_alias_target. */
560 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL);
561 };
562
563 /* Walk all aliases for NODE. */
564 #define FOR_EACH_ALIAS(node, alias) \
565 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
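/* Illustrative sketch (not part of this header): walking the direct aliases
   of NODE; the referring side of each ipa_ref is assumed to be the alias
   node, and process_alias is a hypothetical callee.

     ipa_ref *ref;
     FOR_EACH_ALIAS (node, ref)
       {
         symtab_node *alias = ref->referring;
         process_alias (alias);
       }
 */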
566
567 /* This is the information that is put into the cgraph local structure
568 to recover a function. */
569 struct lto_file_decl_data;
570
571 extern const char * const cgraph_availability_names[];
572 extern const char * const ld_plugin_symbol_resolution_names[];
573 extern const char * const tls_model_names[];
574
575 /* Information about thunk, used only for same body aliases. */
576
577 struct GTY(()) cgraph_thunk_info {
578 /* Information about the thunk. */
579 HOST_WIDE_INT fixed_offset;
580 HOST_WIDE_INT virtual_value;
581 tree alias;
582 bool this_adjusting;
583 bool virtual_offset_p;
584 bool add_pointer_bounds_args;
585 /* Set to true when alias node is thunk. */
586 bool thunk_p;
587 };
588
589 /* Information about the function collected locally.
590 Available after function is analyzed. */
591
592 struct GTY(()) cgraph_local_info {
593 /* Set when function is visible in the current compilation unit only
594 and its address is never taken. */
595 unsigned local : 1;
596
597 /* False when there is something that makes versioning impossible. */
598 unsigned versionable : 1;
599
600 /* False when function calling convention and signature cannot be changed.
601 This is the case when __builtin_apply_args is used. */
602 unsigned can_change_signature : 1;
603
604 /* True when the function has been originally extern inline, but it is
605 redefined now. */
606 unsigned redefined_extern_inline : 1;
607
608 /* True if the function may enter serial irrevocable mode. */
609 unsigned tm_may_enter_irr : 1;
610 };
611
612 /* Information about the function that needs to be computed globally
613 once compilation is finished. Available only with -funit-at-a-time. */
614
615 struct GTY(()) cgraph_global_info {
616 /* For inline clones this points to the function they will be
617 inlined into. */
618 cgraph_node *inlined_to;
619 };
620
621 /* Represent which DECL tree (or reference to such tree)
622 will be replaced by another tree while versioning. */
623 struct GTY(()) ipa_replace_map
624 {
625 /* The tree that will be replaced. */
626 tree old_tree;
627 /* The new (replacing) tree. */
628 tree new_tree;
629 /* Parameter number to replace, when old_tree is NULL. */
630 int parm_num;
631 /* True when a substitution should be done, false otherwise. */
632 bool replace_p;
633 /* True when we replace a reference to old_tree. */
634 bool ref_p;
635 };
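/* Illustrative sketch (not part of this header): building a replace map for
   a virtual clone, roughly the way the IPA passes populate cgraph_clone_info;
   the parameter index and replacement value are made up.

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL;
     map->parm_num = 0;                              // replace the first parameter
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);
 */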
636
637 struct GTY(()) cgraph_clone_info
638 {
639 vec<ipa_replace_map *, va_gc> *tree_map;
640 bitmap args_to_skip;
641 bitmap combined_args_to_skip;
642 };
643
644 enum cgraph_simd_clone_arg_type
645 {
646 SIMD_CLONE_ARG_TYPE_VECTOR,
647 SIMD_CLONE_ARG_TYPE_UNIFORM,
648 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
649 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
650 SIMD_CLONE_ARG_TYPE_MASK
651 };
652
653 /* Function arguments in the original function of a SIMD clone.
654 Supplementary data for `struct simd_clone'. */
655
656 struct GTY(()) cgraph_simd_clone_arg {
657 /* Original function argument as it originally existed in
658 DECL_ARGUMENTS. */
659 tree orig_arg;
660
661 /* orig_arg's type (or, for extern functions, the type from
662 TYPE_ARG_TYPES). */
663 tree orig_type;
664
665 /* If argument is a vector, this holds the vector version of
666 orig_arg that after adjusting the argument types will live in
667 DECL_ARGUMENTS. Otherwise, this is NULL.
668
669 This basically holds:
670 vector(simdlen) __typeof__(orig_arg) new_arg. */
671 tree vector_arg;
672
673 /* vector_arg's type (or, for extern functions, the new vector type). */
674 tree vector_type;
675
676 /* If argument is a vector, this holds the array where the simd
677 argument is held while executing the simd clone function. This
678 is a local variable in the cloned function. Its content is
679 copied from vector_arg upon entry to the clone.
680
681 This basically holds:
682 __typeof__(orig_arg) simd_array[simdlen]. */
683 tree simd_array;
684
685 /* A SIMD clone's argument can be either linear (constant or
686 variable), uniform, or vector. */
687 enum cgraph_simd_clone_arg_type arg_type;
688
689 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP this is
690 the constant linear step, if arg_type is
691 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP, this is index of
692 the uniform argument holding the step, otherwise 0. */
693 HOST_WIDE_INT linear_step;
694
695 /* Variable alignment if available, otherwise 0. */
696 unsigned int alignment;
697 };
698
699 /* Specific data for a SIMD function clone. */
700
701 struct GTY(()) cgraph_simd_clone {
702 /* Number of words in the SIMD lane associated with this clone. */
703 unsigned int simdlen;
704
705 /* Number of annotated function arguments in `args'. This is
706 usually the number of named arguments in FNDECL. */
707 unsigned int nargs;
708
709 /* Max hardware vector size in bits for integral vectors. */
710 unsigned int vecsize_int;
711
712 /* Max hardware vector size in bits for floating point vectors. */
713 unsigned int vecsize_float;
714
715 /* The mangling character for a given vector size. This is used
716 to determine the ISA mangling bit as specified in the Intel
717 Vector ABI. */
718 unsigned char vecsize_mangle;
719
720 /* True if this is the masked, in-branch version of the clone,
721 otherwise false. */
722 unsigned int inbranch : 1;
723
724 /* True if this is a Cilk Plus variant. */
725 unsigned int cilk_elemental : 1;
726
727 /* Doubly linked list of SIMD clones. */
728 cgraph_node *prev_clone, *next_clone;
729
730 /* Original cgraph node the SIMD clones were created for. */
731 cgraph_node *origin;
732
733 /* Annotated function arguments for the original function. */
734 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
735 };
736
737 /* Function Multiversioning info. */
738 struct GTY((for_user)) cgraph_function_version_info {
739 /* The cgraph_node for which the function version info is stored. */
740 cgraph_node *this_node;
741 /* Chains all the semantically identical function versions. The
742 first function in this chain is the version_info node of the
743 default function. */
744 cgraph_function_version_info *prev;
745 /* If this version node corresponds to a dispatcher for function
746 versions, this points to the version info node of the default
747 function, the first node in the chain. */
748 cgraph_function_version_info *next;
749 /* If this node corresponds to a function version, this points
750 to the dispatcher function decl, which is the function that must
751 be called to execute the right function version at run-time.
752
753 If this cgraph node is a dispatcher (if dispatcher_function is
754 true, in the cgraph_node struct) for function versions, this
755 points to resolver function, which holds the function body of the
756 dispatcher. The dispatcher decl is an alias to the resolver
757 function decl. */
758 tree dispatcher_resolver;
759 };
760
761 #define DEFCIFCODE(code, type, string) CIF_ ## code,
762 /* Reasons for inlining failures. */
763
764 enum cgraph_inline_failed_t {
765 #include "cif-code.def"
766 CIF_N_REASONS
767 };
768
769 enum cgraph_inline_failed_type_t
770 {
771 CIF_FINAL_NORMAL = 0,
772 CIF_FINAL_ERROR
773 };
774
775 struct cgraph_edge;
776
777 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
778 {
779 typedef gimple compare_type;
780
781 static hashval_t hash (cgraph_edge *);
782 static hashval_t hash (gimple);
783 static bool equal (cgraph_edge *, gimple);
784 };
785
786 /* The cgraph data structure.
787 Each function decl has an assigned cgraph_node listing its callees and callers. */
788
789 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
790 public:
791 /* Remove the node from cgraph and all inline clones inlined into it.
792 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
793 removed. This allows calling the function from an outer loop walking the
794 clone tree. */
795 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
796
797 /* Record all references from cgraph_node that are taken
798 in statement STMT. */
799 void record_stmt_references (gimple stmt);
800
801 /* Like cgraph_set_call_stmt but walk the clone tree and update all
802 clones sharing the same function body.
803 When UPDATE_SPECULATIVE is true, all three components of
804 a speculative edge get updated. Otherwise we update only the direct
805 call. */
806 void set_call_stmt_including_clones (gimple old_stmt, gcall *new_stmt,
807 bool update_speculative = true);
808
809 /* Walk the alias chain to return the function cgraph_node is alias of.
810 Walk through thunk, too.
811 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
812 cgraph_node *function_symbol (enum availability *avail = NULL);
813
814 /* Walk the alias chain to return the function cgraph_node is alias of.
815 Walk through non virtual thunks, too. Thus we return either a function
816 or a virtual thunk node.
817 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
818 cgraph_node *function_or_virtual_thunk_symbol
819 (enum availability *avail = NULL);
820
821 /* Create node representing clone of N executed COUNT times. Decrease
822 the execution counts from original node too.
823 The new clone will have decl set to DECL that may or may not be the same
824 as decl of N.
825
826 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
827 function's profile to reflect the fact that part of execution is handled
828 by node.
829 When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
830 the new clone. Otherwise the caller is responsible for doing so later.
831
832 If the new node is being inlined into another one, NEW_INLINED_TO should be
833 the outline function the new one is (even indirectly) inlined to.
834 All hooks will see this in node's global.inlined_to, when invoked.
835 Can be NULL if the node is not inlined. */
836 cgraph_node *create_clone (tree decl, gcov_type count, int freq,
837 bool update_original,
838 vec<cgraph_edge *> redirect_callers,
839 bool call_duplication_hook,
840 cgraph_node *new_inlined_to,
841 bitmap args_to_skip);
842
843 /* Create callgraph node clone with new declaration. The actual body will
844 be copied later at compilation stage. */
845 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
846 vec<ipa_replace_map *, va_gc> *tree_map,
847 bitmap args_to_skip, const char * suffix);
848
849 /* cgraph node being removed from symbol table; see if its entry can be
850 replaced by another inline clone. */
851 cgraph_node *find_replacement (void);
852
853 /* Create a new cgraph node which is the new version of
854 callgraph node. REDIRECT_CALLERS holds the callers
855 edges which should be redirected to point to
856 NEW_VERSION. ALL the callees edges of the node
857 are cloned to the new version node. Return the new
858 version node.
859
860 If non-NULL, BBS_TO_COPY determines which basic blocks
861 are copied, to prevent duplication of calls that are dead
862 in the clone. */
863
864 cgraph_node *create_version_clone (tree new_decl,
865 vec<cgraph_edge *> redirect_callers,
866 bitmap bbs_to_copy);
867
868 /* Perform function versioning.
869 Function versioning includes copying of the tree and
870 a callgraph update (creating a new cgraph node and updating
871 its callees and callers).
872
873 REDIRECT_CALLERS varray includes the edges to be redirected
874 to the new version.
875
876 TREE_MAP is a mapping of tree nodes we want to replace with
877 new ones (according to results of prior analysis).
878
879 If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
880 from the new version.
881 If SKIP_RETURN is true, the new version will return void.
882 If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
883 If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
884
885 Return the new version's cgraph node. */
886 cgraph_node *create_version_clone_with_body
887 (vec<cgraph_edge *> redirect_callers,
888 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
889 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
890 const char *clone_name);
891
892 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
893 corresponding to cgraph_node. */
894 cgraph_function_version_info *insert_new_function_version (void);
895
896 /* Get the cgraph_function_version_info node corresponding to node. */
897 cgraph_function_version_info *function_version (void);
898
899 /* Discover all functions and variables that are trivially needed, analyze
900 them as well as all functions and variables referred to by them. */
901 void analyze (void);
902
903 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
904 aliases DECL with adjustments made to the first parameter.
905 See comments in thunk_adjust for details on the parameters. */
906 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
907 HOST_WIDE_INT fixed_offset,
908 HOST_WIDE_INT virtual_value,
909 tree virtual_offset,
910 tree real_alias);
911
912
913 /* Return node that alias is aliasing. */
914 inline cgraph_node *get_alias_target (void);
915
916 /* Given a function symbol, walk the alias chain to return the function the
917 node is an alias of. Do not walk through thunks.
918 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
919
920 cgraph_node *ultimate_alias_target (availability *availability = NULL);
921
922 /* Expand thunk NODE to gimple if possible.
923 When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
924 no assembler is produced.
925 When OUTPUT_ASM_THUNKS is true, also produce assembler for
926 thunks that are not lowered. */
927 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
928
929 /* Call expand_thunk on all callers that are thunks and analyze those
930 nodes that were expanded. */
931 void expand_all_artificial_thunks ();
932
933 /* Assemble thunks and aliases associated to node. */
934 void assemble_thunks_and_aliases (void);
935
936 /* Expand function specified by node. */
937 void expand (void);
938
939 /* As a GCC extension we allow redefinition of the function. The
940 semantics when both copies of bodies differ is not well defined.
941 We replace the old body with new body so in unit at a time mode
942 we always use new body, while in normal mode we may end up with
943 old body inlined into some functions and new body expanded and
944 inlined in others. */
945 void reset (void);
946
947 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
948 kind of wrapper method. */
949 void create_wrapper (cgraph_node *target);
950
951 /* Verify cgraph nodes of the cgraph node. */
952 void DEBUG_FUNCTION verify_node (void);
953
954 /* Remove function from symbol table. */
955 void remove (void);
956
957 /* Dump call graph node to file F. */
958 void dump (FILE *f);
959
960 /* Dump call graph node to stderr. */
961 void DEBUG_FUNCTION debug (void);
962
963 /* When doing LTO, read cgraph_node's body from disk if it is not already
964 present. */
965 bool get_untransformed_body (void);
966
967 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
968 if it is not already present. When some IPA transformations are scheduled,
969 apply them. */
970 bool get_body (void);
971
972 /* Release memory used to represent body of function.
973 Use this only for functions that are released before being translated to
974 target code (i.e. RTL). Functions that are compiled to RTL and beyond
975 are free'd in final.c via free_after_compilation(). */
976 void release_body (bool keep_arguments = false);
977
978 /* Return the DECL_STRUCT_FUNCTION of the function. */
979 struct function *get_fun (void);
980
981 /* cgraph_node is no longer a nested function; update cgraph accordingly. */
982 void unnest (void);
983
984 /* Bring cgraph node local. */
985 void make_local (void);
986
987 /* Likewise indicate that a node has its address taken. */
988 void mark_address_taken (void);
989
990 /* Set finalization priority to PRIORITY. */
991 void set_fini_priority (priority_type priority);
992
993 /* Return the finalization priority. */
994 priority_type get_fini_priority (void);
995
996 /* Create edge from a given function to CALLEE in the cgraph. */
997 cgraph_edge *create_edge (cgraph_node *callee,
998 gcall *call_stmt, gcov_type count,
999 int freq);
1000
1001 /* Create an indirect edge with a yet-undetermined callee where the call
1002 statement destination is a formal parameter of the caller with index
1003 PARAM_INDEX. */
1004 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1005 gcov_type count, int freq,
1006 bool compute_indirect_info = true);
1007
1008 /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
1009 the same function body. If clones already have an edge for OLD_STMT, only
1010 update the edge the same way as cgraph_set_call_stmt_including_clones does. */
1011 void create_edge_including_clones (cgraph_node *callee,
1012 gimple old_stmt, gcall *stmt,
1013 gcov_type count,
1014 int freq,
1015 cgraph_inline_failed_t reason);
1016
1017 /* Return the callgraph edge representing the GIMPLE_CALL statement
1018 CALL_STMT. */
1019 cgraph_edge *get_edge (gimple call_stmt);
1020
1021 /* Collect all callers of cgraph_node and its aliases that are known to lead
1022 to NODE (i.e. are not overwritable). */
1023 vec<cgraph_edge *> collect_callers (void);
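/* Illustrative sketch (not part of this header): collecting callers before
   cloning; the returned vector is assumed to need an explicit release, and
   the clone suffix is made up.

     vec<cgraph_edge *> callers = node->collect_callers ();
     cgraph_node *clone
       = node->create_virtual_clone (callers, NULL, NULL, "my_suffix");
     callers.release ();
 */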
1024
1025 /* Remove all callers from the node. */
1026 void remove_callers (void);
1027
1028 /* Remove all callees from the node. */
1029 void remove_callees (void);
1030
1031 /* Return function availability. See cgraph.h for description of individual
1032 return values. */
1033 enum availability get_availability (void);
1034
1035 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1036 if any to NOTHROW. */
1037 void set_nothrow_flag (bool nothrow);
1038
1039 /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
1040 if any to READONLY. */
1041 void set_const_flag (bool readonly, bool looping);
1042
1043 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1044 if any to PURE. */
1045 void set_pure_flag (bool pure, bool looping);
1046
1047 /* Call callback on function and aliases associated to the function.
1048 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1049 skipped. */
1050
1051 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1052 void *),
1053 void *data, bool include_overwritable);
1054
1055 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1056 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1057 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1058 skipped. */
1059 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1060 void *data),
1061 void *data,
1062 bool include_overwritable,
1063 bool exclude_virtual_thunks = false);
1064
1065 /* Likewise indicate that a node is needed, i.e. reachable via some
1066 external means. */
1067 inline void mark_force_output (void);
1068
1069 /* Return true when function can be marked local. */
1070 bool local_p (void);
1071
1072 /* Return true if cgraph_node can be made local for API change.
1073 Extern inline functions and C++ COMDAT functions can be made local
1074 at the expense of possible code size growth if function is used in multiple
1075 compilation units. */
1076 bool can_be_local_p (void);
1077
1078 /* Return true when cgraph_node cannot return or throw and thus
1079 it is safe to ignore its side effects for IPA analysis. */
1080 bool cannot_return_p (void);
1081
1082 /* Return true when function cgraph_node and all its aliases are only called
1083 directly.
1084 I.e. it is not externally visible, its address was not taken and
1085 it is not used in any other non-standard way. */
1086 bool only_called_directly_p (void);
1087
1088 /* Return true when function is only called directly or it has an alias.
1089 I.e. it is not externally visible, its address was not taken and
1090 it is not used in any other non-standard way. */
1091 inline bool only_called_directly_or_aliased_p (void);
1092
1093 /* Return true when function cgraph_node can be expected to be removed
1094 from program when direct calls in this compilation unit are removed.
1095
1096 As a special case COMDAT functions are
1097 cgraph_can_remove_if_no_direct_calls_p while they are not
1098 cgraph_only_called_directly_p (it is possible they are called from other
1099 unit)
1100
1101 This function behaves as cgraph_only_called_directly_p because eliminating
1102 all uses of COMDAT function does not make it necessarily disappear from
1103 the program unless we are compiling whole program or we do LTO. In this
1104 case we know we win since dynamic linking will not really discard the
1105 linkonce section.
1106
1107 If WILL_INLINE is true, assume that function will be inlined into all the
1108 direct calls. */
1109 bool will_be_removed_from_program_if_no_direct_calls_p
1110 (bool will_inline = false);
1111
1112 /* Return true when function can be removed from callgraph
1113 if all direct calls and references are eliminated. The function does
1114 not take into account comdat groups. */
1115 bool can_remove_if_no_direct_calls_and_refs_p (void);
1116
1117 /* Return true when function cgraph_node and its aliases can be removed from
1118 callgraph if all direct calls are eliminated.
1119 If WILL_INLINE is true, assume that function will be inlined into all the
1120 direct calls. */
1121 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1122
1123 /* Return true when callgraph node is a function with Gimple body defined
1124 in current unit. Functions can also be defined externally or they
1125 can be thunks with no Gimple representation.
1126
1127 Note that at WPA stage, the function body may not be present in memory. */
1128 inline bool has_gimple_body_p (void);
1129
1130 /* Return true if function should be optimized for size. */
1131 bool optimize_for_size_p (void);
1132
1133 /* Dump the callgraph to file F. */
1134 static void dump_cgraph (FILE *f);
1135
1136 /* Dump the call graph to stderr. */
1137 static inline
1138 void debug_cgraph (void)
1139 {
1140 dump_cgraph (stderr);
1141 }
1142
1143 /* Record that DECL1 and DECL2 are semantically identical function
1144 versions. */
1145 static void record_function_versions (tree decl1, tree decl2);
1146
1147 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1148 DECL is a duplicate declaration. */
1149 static void delete_function_version (tree decl);
1150
1151 /* Add the function FNDECL to the call graph.
1152 Unlike finalize_function, this function is intended to be used
1153 by middle end and allows insertion of new function at arbitrary point
1154 of compilation. The function can be either in high, low or SSA form
1155 GIMPLE.
1156
1157 The function is assumed to be reachable and have address taken (so no
1158 API breaking optimizations are performed on it).
1159
1160 The main work done by this function is to enqueue the function for later
1161 processing to avoid the need for the passes to be re-entrant. */
1162 static void add_new_function (tree fndecl, bool lowered);
1163
1164 /* Return callgraph node for given symbol and check it is a function. */
1165 static inline cgraph_node *get (const_tree decl)
1166 {
1167 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1168 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1169 }
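/* Illustrative sketch (not part of this header): mapping a FUNCTION_DECL to
   its call graph node; get may return NULL while get_create never does.
   analyze_body is a hypothetical callee.

     cgraph_node *cnode = cgraph_node::get (fndecl);
     if (!cnode)
       cnode = cgraph_node::get_create (fndecl);
     if (cnode->has_gimple_body_p ())
       analyze_body (cnode);
 */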
1170
1171 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1172 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1173 have the garbage collector run at the moment. We would need to either
1174 create a new GC context, or just not compile right now. */
1175 static void finalize_function (tree, bool);
1176
1177 /* Return cgraph node assigned to DECL. Create new one when needed. */
1178 static cgraph_node * create (tree decl);
1179
1180 /* Try to find a call graph node for declaration DECL and if it does not
1181 exist or if it corresponds to an inline clone, create a new one. */
1182 static cgraph_node * get_create (tree);
1183
1184 /* Return local info for the compiled function. */
1185 static cgraph_local_info *local_info (tree decl);
1186
1187 /* Return RTL info for the compiled function. */
1188 static struct cgraph_rtl_info *rtl_info (tree);
1189
1190 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1191 Return NULL if there's no such node. */
1192 static cgraph_node *get_for_asmname (tree asmname);
1193
1194 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1195 successful and NULL otherwise.
1196 Same body aliases are output whenever the body of DECL is output,
1197 and cgraph_node::get (ALIAS) transparently
1198 returns cgraph_node::get (DECL). */
1199 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1200
1201 /* Verify whole cgraph structure. */
1202 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1203
1204 /* Worker to bring NODE local. */
1205 static bool make_local (cgraph_node *node, void *);
1206
1207 /* Mark ALIAS as an alias to DECL. DECL_NODE is the cgraph node representing
1208 the function the body is associated
1209 with (not necessarily cgraph_node (DECL)). */
1210 static cgraph_node *create_alias (tree alias, tree target);
1211
1212 /* Return true if NODE has thunk. */
1213 static bool has_thunk_p (cgraph_node *node, void *);
1214
1215 cgraph_edge *callees;
1216 cgraph_edge *callers;
1217 /* List of edges representing indirect calls with a yet undetermined
1218 callee. */
1219 cgraph_edge *indirect_calls;
1220 /* For nested functions points to function the node is nested in. */
1221 cgraph_node *origin;
1222 /* Points to first nested function, if any. */
1223 cgraph_node *nested;
1224 /* Pointer to the next function with same origin, if any. */
1225 cgraph_node *next_nested;
1226 /* Pointer to the next clone. */
1227 cgraph_node *next_sibling_clone;
1228 cgraph_node *prev_sibling_clone;
1229 cgraph_node *clones;
1230 cgraph_node *clone_of;
1231 /* If instrumentation_clone is 1 then instrumented_version points
1232 to the original function used to make instrumented version.
1233 Otherwise points to instrumented version of the function. */
1234 cgraph_node *instrumented_version;
1235 /* If instrumentation_clone is 1 then orig_decl is the original
1236 function declaration. */
1237 tree orig_decl;
1238 /* For functions with many call sites it holds a map from call statement
1239 to the edge, to speed up edge lookup. */
1240 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1241 /* Declaration node used to be clone of. */
1242 tree former_clone_of;
1243
1244 /* If this is a SIMD clone, this points to the SIMD specific
1245 information for it. */
1246 cgraph_simd_clone *simdclone;
1247 /* If this function has SIMD clones, this points to the first clone. */
1248 cgraph_node *simd_clones;
1249
1250 /* Interprocedural passes scheduled to have their transform functions
1251 applied next time we execute local pass on them. We maintain it
1252 per-function in order to allow IPA passes to introduce new functions. */
1253 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1254
1255 cgraph_local_info local;
1256 cgraph_global_info global;
1257 struct cgraph_rtl_info *rtl;
1258 cgraph_clone_info clone;
1259 cgraph_thunk_info thunk;
1260
1261 /* Expected number of executions: calculated in profile.c. */
1262 gcov_type count;
1263 /* How to scale counts at materialization time; used to merge
1264 LTO units with different number of profile runs. */
1265 int count_materialization_scale;
1266 /* Unique id of the node. */
1267 int uid;
1268 /* Summary unique id of the node. */
1269 int summary_uid;
1270 /* ID assigned by the profiling. */
1271 unsigned int profile_id;
1272 /* Time profiler: first run of function. */
1273 int tp_first_run;
1274
1275 /* Set when decl is an abstract function pointed to by the
1276 ABSTRACT_DECL_ORIGIN of a reachable function. */
1277 unsigned used_as_abstract_origin : 1;
1278 /* Set once the function is lowered (i.e. its CFG is built). */
1279 unsigned lowered : 1;
1280 /* Set once the function has been instantiated and its callee
1281 lists created. */
1282 unsigned process : 1;
1283 /* How commonly executed the node is. Initialized during branch
1284 probabilities pass. */
1285 ENUM_BITFIELD (node_frequency) frequency : 2;
1286 /* True when function can only be called at startup (from static ctor). */
1287 unsigned only_called_at_startup : 1;
1288 /* True when function can only be called at startup (from static dtor). */
1289 unsigned only_called_at_exit : 1;
1290 /* True when function is the transactional clone of a function which
1291 is called only from inside transactions. */
1292 /* ?? We should be able to remove this. We have enough bits in
1293 cgraph to calculate it. */
1294 unsigned tm_clone : 1;
1295 /* True if this decl is a dispatcher for function versions. */
1296 unsigned dispatcher_function : 1;
1297 /* True if this decl calls a COMDAT-local function. This is set up in
1298 compute_inline_parameters and inline_call. */
1299 unsigned calls_comdat_local : 1;
1300 /* True if node has been created by merge operation in IPA-ICF. */
1301 unsigned icf_merged: 1;
1302 /* True when function is clone created for Pointer Bounds Checker
1303 instrumentation. */
1304 unsigned instrumentation_clone : 1;
1305 /* True if call to node can't result in a call to free, munmap or
1306 other operation that could make previously non-trapping memory
1307 accesses trapping. */
1308 unsigned nonfreeing_fn : 1;
1309 /* True if multiple COMDAT bodies were merged by lto-symtab. */
1310 unsigned merged : 1;
1311 /* True if function was created to be executed in parallel. */
1312 unsigned parallelized_function : 1;
1313 /* True if function is part split out by ipa-split. */
1314 unsigned split_part : 1;
1315
1316 private:
1317 /* Worker for call_for_symbol_and_aliases. */
1318 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1319 void *),
1320 void *data, bool include_overwritable);
1321 };
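/* Illustrative sketch (not part of this header): the classic edge walks over
   a cgraph_node; callee and indirect-call edges are assumed to be chained
   through next_callee as elsewhere in the call graph code, and the callees
   below are hypothetical.

     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       account_call (e->callee);
     for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
       account_indirect_call (e);
 */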
1322
1323 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1324 can appear in multiple sets. */
1325 struct cgraph_node_set_def
1326 {
1327 hash_map<cgraph_node *, size_t> *map;
1328 vec<cgraph_node *> nodes;
1329 };
1330
1331 typedef cgraph_node_set_def *cgraph_node_set;
1332 typedef struct varpool_node_set_def *varpool_node_set;
1333
1334 class varpool_node;
1335
1336 /* A varpool node set is a collection of varpool nodes. A varpool node
1337 can appear in multiple sets. */
1338 struct varpool_node_set_def
1339 {
1340 hash_map<varpool_node *, size_t> * map;
1341 vec<varpool_node *> nodes;
1342 };
1343
1344 /* Iterator structure for cgraph node sets. */
1345 struct cgraph_node_set_iterator
1346 {
1347 cgraph_node_set set;
1348 unsigned index;
1349 };
1350
1351 /* Iterator structure for varpool node sets. */
1352 struct varpool_node_set_iterator
1353 {
1354 varpool_node_set set;
1355 unsigned index;
1356 };
1357
1358 /* Context of a polymorphic call. It represents information about the type of
1359 instance that may reach the call. This is used by ipa-devirt walkers of the
1360 type inheritance graph. */
1361
1362 class GTY(()) ipa_polymorphic_call_context {
1363 public:
1364 /* The called object appears in an object of type OUTER_TYPE
1365 at offset OFFSET. When information is not 100% reliable, we
1366 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1367 HOST_WIDE_INT offset;
1368 HOST_WIDE_INT speculative_offset;
1369 tree outer_type;
1370 tree speculative_outer_type;
1371 /* True if outer object may be in construction or destruction. */
1372 unsigned maybe_in_construction : 1;
1373 /* True if outer object may be of derived type. */
1374 unsigned maybe_derived_type : 1;
1375 /* True if speculative outer object may be of derived type. We always
1376 speculate that construction does not happen. */
1377 unsigned speculative_maybe_derived_type : 1;
1378 /* True if the context is invalid and all calls should be redirected
1379 to BUILTIN_UNREACHABLE. */
1380 unsigned invalid : 1;
1381 /* True if the outer type is dynamic. */
1382 unsigned dynamic : 1;
1383
1384 /* Build empty "I know nothing" context. */
1385 ipa_polymorphic_call_context ();
1386 /* Build polymorphic call context for indirect call E. */
1387 ipa_polymorphic_call_context (cgraph_edge *e);
1388 /* Build polymorphic call context for IP invariant CST.
1389 If specified, OTR_TYPE specifies the type of polymorphic call
1390 that takes CST+OFFSET as a parameter. */
1391 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1392 HOST_WIDE_INT offset = 0);
1393 /* Build context for pointer REF contained in FNDECL at statement STMT.
1394 If INSTANCE is non-NULL, return a pointer to the object described by
1395 the context. */
1396 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple stmt,
1397 tree *instance = NULL);
1398
1399 /* Look for vtable stores or constructor calls to work out dynamic type
1400 of memory location. */
1401 bool get_dynamic_type (tree, tree, tree, gimple);
1402
1403 /* Make context non-speculative. */
1404 void clear_speculation ();
1405
1406 /* Produce a context specifying all derived types of OTR_TYPE. If OTR_TYPE is
1407 NULL, the context is set to the dummy "I know nothing" setting. */
1408 void clear_outer_type (tree otr_type = NULL);
1409
1410 /* Walk container types and modify context to point to actual class
1411 containing OTR_TYPE (if non-NULL) as base class.
1412 Return true if resulting context is valid.
1413
1414 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1415 valid only via allocation of a new polymorphic type inside by means
1416 of placement new.
1417
1418 When CONSIDER_BASES is false, only look for actual fields, not base types
1419 of TYPE. */
1420 bool restrict_to_inner_class (tree otr_type,
1421 bool consider_placement_new = true,
1422 bool consider_bases = true);
1423
1424 /* Adjust all offsets in contexts by given number of bits. */
1425 void offset_by (HOST_WIDE_INT);
1426 /* Use when we cannot track dynamic type changes. This speculatively assumes
1427 the type change is not happening. */
1428 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1429 /* Assume that both THIS and a given context are valid and strengthen THIS
1430 if possible. Return true if any strengthening was made.
1431 If the actual type the context is being used in is known, OTR_TYPE should be
1432 set accordingly. This improves the quality of the combined result. */
1433 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1434 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1435
1436 /* Return TRUE if context is fully useless. */
1437 bool useless_p () const;
1438 /* Return TRUE if this context conveys the same information as X. */
1439 bool equal_to (const ipa_polymorphic_call_context &x) const;
1440
1441 /* Dump human readable context to F. If NEWLINE is true, it will be
1442 terminated by a newline. */
1443 void dump (FILE *f, bool newline = true) const;
1444 void DEBUG_FUNCTION debug () const;
1445
1446 /* LTO streaming. */
1447 void stream_out (struct output_block *) const;
1448 void stream_in (struct lto_input_block *, struct data_in *data_in);
1449
1450 private:
1451 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1452 bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1453 void set_by_decl (tree, HOST_WIDE_INT);
1454 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1455 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1456 void make_speculative (tree otr_type = NULL);
1457 };
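/* Illustrative sketch (not part of this header): building a context for an
   indirect polymorphic call edge and checking whether it carries any useful
   information before dumping it.

     ipa_polymorphic_call_context ctx (edge);
     if (ctx.invalid || ctx.useless_p ())
       return;
     ctx.clear_speculation ();
     ctx.dump (dump_file);
 */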
1458
1459 /* Structure containing additional information about an indirect call. */
1460
1461 struct GTY(()) cgraph_indirect_call_info
1462 {
1463 /* When agg_contents is set, this is the offset at which the call pointer is
1464 located within the aggregate. */
1465 HOST_WIDE_INT offset;
1466 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1467 ipa_polymorphic_call_context context;
1468 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1469 HOST_WIDE_INT otr_token;
1470 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1471 tree otr_type;
1472 /* Index of the parameter that is called. */
1473 int param_index;
1474 /* ECF flags determined from the caller. */
1475 int ecf_flags;
1476 /* Profile_id of the common target obtained from the profile. */
1477 int common_target_id;
1478 /* Probability that call will land in function with COMMON_TARGET_ID. */
1479 int common_target_probability;
1480
1481 /* Set when the call is a virtual call with the parameter being the
1482 associated object pointer rather than a simple direct call. */
1483 unsigned polymorphic : 1;
1484 /* Set when the call is a call of a pointer loaded from contents of an
1485 aggregate at offset. */
1486 unsigned agg_contents : 1;
1487 /* Set when this is a call through a member pointer. */
1488 unsigned member_ptr : 1;
1489 /* When the previous bit is set, this one determines whether the destination
1490 is loaded from a parameter passed by reference. */
1491 unsigned by_ref : 1;
1492 /* For polymorphic calls this specifies whether the virtual table pointer
1493 may have changed between function entry and the call. */
1494 unsigned vptr_changed : 1;
1495 };
1496
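/* A sketch of how this record is typically consulted, assuming E is an
   edge with indirect_unknown_callee set (the variable names are illustrative):

	cgraph_indirect_call_info *ii = e->indirect_info;
	if (ii->polymorphic)
	  {
	    /* OTR_TYPE and OTR_TOKEN identify the virtual method and
	       II->CONTEXT describes the object it is invoked on.  */
	  }
	else if (ii->agg_contents)
	  {
	    /* The callee pointer is loaded from offset II->OFFSET within
	       an aggregate related to parameter II->PARAM_INDEX.  */
	  }
*/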
1497 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1498 for_user)) cgraph_edge {
1499 friend class cgraph_node;
1500
1501 /* Remove the edge in the cgraph. */
1502 void remove (void);
1503
1504 /* Change the call_stmt field of the edge to NEW_STMT.
1505 If UPDATE_SPECULATIVE and this edge is any component of a speculative
1506 edge, then update all components. */
1507 void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1508
1509 /* Redirect callee of the edge to N. The function does not update underlying
1510 call expression. */
1511 void redirect_callee (cgraph_node *n);
1512
1513 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1514 create one or more equivalent thunks for N and redirect E to the first in
1515 the chain. Note that it is then necessary to call
1516 n->expand_all_artificial_thunks once all callers are redirected. */
1517 void redirect_callee_duplicating_thunks (cgraph_node *n);
1518
1519 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1520 CALLEE. */
1523 cgraph_edge *make_direct (cgraph_node *callee);
1524
1525 /* Turn the edge into a speculative call to N2. Update the profile so the
1526 direct call is taken DIRECT_COUNT times with DIRECT_FREQUENCY. */
1528 cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
1529 int direct_frequency);
1530
1531 /* Given speculative call edge, return all three components. */
1532 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1533 ipa_ref *&reference);
1534
1535 /* A speculative call edge turned out to be a direct call to CALLEE_DECL.
1536 Remove the speculative call sequence and return the edge representing the
1537 call. It is up to the caller to redirect the call as appropriate. */
1538 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1539
1540 /* If necessary, change the function declaration in the call statement
1541 associated with the edge so that it corresponds to the edge callee. */
1542 gimple redirect_call_stmt_to_callee (void);
1543
1544 /* Create a clone of the edge in the callgraph, residing in node N and
1545 represented by CALL_STMT. */
1546 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1547 gcov_type count_scale, int freq_scale, bool update_original);
1548
1549 /* Verify edge count and frequency. */
1550 bool verify_count_and_frequency ();
1551
1552 /* Return true when the call of this edge cannot lead to a return from the
1553 caller and thus it is safe to ignore its side effects for IPA analysis
1554 when computing side effects of the caller. */
1555 bool cannot_lead_to_return_p (void);
1556
1557 /* Return true when the edge represents a direct recursion. */
1558 bool recursive_p (void);
1559
1560 /* Return true if the call can be hot. */
1561 bool maybe_hot_p (void);
1562
1563 /* Rebuild cgraph edges for current function node. This needs to be run after
1564 passes that don't update the cgraph. */
1565 static unsigned int rebuild_edges (void);
1566
1567 /* Rebuild cgraph references for current function node. This needs to be run
1568 after passes that don't update the cgraph. */
1569 static void rebuild_references (void);
1570
1571 /* Expected number of executions: calculated in profile.c. */
1572 gcov_type count;
1573 cgraph_node *caller;
1574 cgraph_node *callee;
1575 cgraph_edge *prev_caller;
1576 cgraph_edge *next_caller;
1577 cgraph_edge *prev_callee;
1578 cgraph_edge *next_callee;
1579 gcall *call_stmt;
1580 /* Additional information about an indirect call. Not cleared when an edge
1581 becomes direct. */
1582 cgraph_indirect_call_info *indirect_info;
1583 PTR GTY ((skip (""))) aux;
1584 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1585 explanation why the function was not inlined. */
1586 enum cgraph_inline_failed_t inline_failed;
1587 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1588 when the function is serialized in. */
1589 unsigned int lto_stmt_uid;
1590 /* Expected frequency of executions within the function.
1591 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1592 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1593 int frequency;
1594 /* Unique id of the edge. */
1595 int uid;
1596 /* Whether this edge was made direct by indirect inlining. */
1597 unsigned int indirect_inlining_edge : 1;
1598 /* Whether this edge describes an indirect call with an undetermined
1599 callee. */
1600 unsigned int indirect_unknown_callee : 1;
1602 /* True if the corresponding CALL stmt cannot be inlined. */
1603 unsigned int call_stmt_cannot_inline_p : 1;
1604 /* Can this call throw externally? */
1605 unsigned int can_throw_external : 1;
1606 /* Edges with the SPECULATIVE flag represent indirect calls that were
1607 speculatively turned into direct calls (e.g. by profile feedback).
1608 The final code sequence will have form:
1609
1610 if (call_target == expected_fn)
1611 expected_fn ();
1612 else
1613 call_target ();
1614
1615 Every speculative call is represented by three components attached
1616 to the same call statement:
1617 1) a direct call (to expected_fn)
1618 2) an indirect call (to call_target)
1619 3) an IPA_REF_ADDR reference to expected_fn.
1620
1621 Optimizers may later redirect the direct call to a clone, so 1) and 3)
1622 do not necessarily need to agree on the destination. */
1623 unsigned int speculative : 1;
1624 /* Set to true when the caller is a constructor or destructor of a
1625 polymorphic type. */
1626 unsigned in_polymorphic_cdtor : 1;
1627
1628 private:
1629 /* Remove the edge from the list of the callers of the callee. */
1630 void remove_caller (void);
1631
1632 /* Remove the edge from the list of the callees of the caller. */
1633 void remove_callee (void);
1634
1635 /* Set callee N of call graph edge and add it to the corresponding set of
1636 callers. */
1637 void set_callee (cgraph_node *n);
1638
1639 /* Output flags of edge to a file F. */
1640 void dump_edge_flags (FILE *f);
1641
1642 /* Verify that call graph edge corresponds to DECL from the associated
1643 statement. Return true if the verification should fail. */
1644 bool verify_corresponds_to_fndecl (tree decl);
1645 };
1646
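/* A sketch of a typical walk over the outgoing edges of a node, assuming
   NODE is a cgraph_node with up-to-date edges (indirect calls with unknown
   callees live on the separate node->indirect_calls list):

	for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	  if (e->speculative)
	    {
	      cgraph_edge *direct, *indirect;
	      ipa_ref *ref;
	      e->speculative_call_info (direct, indirect, ref);
	      /* Inspect DIRECT->CALLEE; resolve_speculation () undoes
		 the speculation if it turned out to be wrong.  */
	    }
*/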
1647 #define CGRAPH_FREQ_BASE 1000
1648 #define CGRAPH_FREQ_MAX 100000
1649
1650 /* The varpool data structure.
1651 Each static variable decl has an assigned varpool_node. */
1652
1653 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1654 public:
1655 /* Dump given varpool node to F. */
1656 void dump (FILE *f);
1657
1658 /* Dump given varpool node to stderr. */
1659 void DEBUG_FUNCTION debug (void);
1660
1661 /* Remove variable from symbol table. */
1662 void remove (void);
1663
1664 /* Remove node initializer when it is no longer needed. */
1665 void remove_initializer (void);
1666
1667 void analyze (void);
1668
1669 /* Return variable availability. */
1670 availability get_availability (void);
1671
1672 /* When doing LTO, read variable's constructor from disk if
1673 it is not already present. */
1674 tree get_constructor (void);
1675
1676 /* Return true if variable has constructor that can be used for folding. */
1677 bool ctor_useable_for_folding_p (void);
1678
1679 /* For a given variable pool node, walk the alias chain to return the
1680 variable it is an alias of. Do not walk through thunks.
1681 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
1682 inline varpool_node *ultimate_alias_target
1683 (availability *availability = NULL);
1684
1685 /* Return node that alias is aliasing. */
1686 inline varpool_node *get_alias_target (void);
1687
1688 /* Output one variable, if necessary. Return whether we output it. */
1689 bool assemble_decl (void);
1690
1691 /* For variables in named sections make sure get_variable_section
1692 is called before we switch to those sections. Then section
1693 conflicts between read-only and read-only requiring relocations
1694 sections can be resolved. */
1695 void finalize_named_section_flags (void);
1696
1697 /* Call callback on the varpool symbol and aliases associated with it.
1698 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1699 skipped. */
1700 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1701 void *data,
1702 bool include_overwritable);
1703
1704 /* Return true when variable should be considered externally visible. */
1705 bool externally_visible_p (void);
1706
1707 /* Return true when all references to variable must be visible
1708 in ipa_ref_list.
1709 i.e. if the variable is not externally visible or not used in some magic
1710 way (asm statement or such).
1711 The magic uses are all summarized in force_output flag. */
1712 inline bool all_refs_explicit_p ();
1713
1714 /* Return true when the variable can be removed from the variable pool
1715 if all references to it are eliminated. */
1716 inline bool can_remove_if_no_refs_p (void);
1717
1718 /* Add the variable DECL to the varpool.
1719 Unlike finalize_decl, this function is intended to be used
1720 by the middle end and allows insertion of new variables at an arbitrary
1721 point of compilation. */
1722 static void add (tree decl);
1723
1724 /* Return varpool node for given symbol and check it is a variable. */
1725 static inline varpool_node *get (const_tree decl);
1726
1727 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1728 instructs the middle end to output the variable to the asm file, if needed
1729 or externally visible. */
1730 static void finalize_decl (tree decl);
1731
1732 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1733 Extra name aliases are output whenever DECL is output. */
1734 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1735
1736 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1737 Extra name aliases are output whenever DECL is output. */
1738 static varpool_node * create_alias (tree, tree);
1739
1740 /* Dump the variable pool to F. */
1741 static void dump_varpool (FILE *f);
1742
1743 /* Dump the variable pool to stderr. */
1744 static void DEBUG_FUNCTION debug_varpool (void);
1745
1746 /* Allocate a new varpool node and insert it into basic data structures. */
1747 static varpool_node *create_empty (void);
1748
1749 /* Return varpool node assigned to DECL. Create new one when needed. */
1750 static varpool_node *get_create (tree decl);
1751
1752 /* Given an assembler name, lookup node. */
1753 static varpool_node *get_for_asmname (tree asmname);
1754
1755 /* Set when variable is scheduled to be assembled. */
1756 unsigned output : 1;
1757
1758 /* Set when the variable has a statically initialized pointer
1759 or is a static bounds variable and needs initialization. */
1760 unsigned need_bounds_init : 1;
1761
1762 /* Set if the variable is dynamically initialized, except for
1763 function local statics. */
1764 unsigned dynamically_initialized : 1;
1765
1766 ENUM_BITFIELD(tls_model) tls_model : 3;
1767
1768 /* Set if the variable is known to be used by a single function only.
1769 This is computed by the ipa_single_use pass and used by late optimizations
1770 in places where optimization would be valid for a local static variable
1771 if we did not do any inter-procedural code movement. */
1772 unsigned used_by_single_function : 1;
1773
1774 private:
1775 /* Assemble thunks and aliases associated to varpool node. */
1776 void assemble_aliases (void);
1777
1778 /* Worker for call_for_node_and_aliases. */
1779 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1780 void *data,
1781 bool include_overwritable);
1782 };
1783
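/* A short sketch of the usual way a pass obtains and queries a varpool
   node for a variable DECL (the variable names are illustrative):

	varpool_node *vnode = varpool_node::get_create (decl);
	if (vnode->ctor_useable_for_folding_p ())
	  {
	    tree ctor = vnode->get_constructor ();
	    /* ... fold loads from DECL using CTOR ...  */
	  }
*/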
1784 /* Every top-level asm statement is put into an asm_node. */
1785
1786 struct GTY(()) asm_node {
1787
1788
1789 /* Next asm node. */
1790 asm_node *next;
1791 /* String for this asm node. */
1792 tree asm_str;
1793 /* Ordering of all cgraph nodes. */
1794 int order;
1795 };
1796
1797 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1798
1799 template <>
1800 template <>
1801 inline bool
1802 is_a_helper <cgraph_node *>::test (symtab_node *p)
1803 {
1804 return p && p->type == SYMTAB_FUNCTION;
1805 }
1806
1807 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1808
1809 template <>
1810 template <>
1811 inline bool
1812 is_a_helper <varpool_node *>::test (symtab_node *p)
1813 {
1814 return p && p->type == SYMTAB_VARIABLE;
1815 }
1816
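/* These specializations let is_a and dyn_cast dispatch on the symbol kind,
   e.g. (NODE assumed to be a symtab_node *):

	if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	  /* ... function-specific handling ...  */;
	else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	  /* ... variable-specific handling ...  */;
*/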
1817 /* Macros to access the next item in the list of free cgraph nodes and
1818 edges. */
1819 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1820 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1821 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1822
1823 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1824 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1825 typedef void (*varpool_node_hook)(varpool_node *, void *);
1826 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1827 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1828
1829 struct cgraph_edge_hook_list;
1830 struct cgraph_node_hook_list;
1831 struct varpool_node_hook_list;
1832 struct cgraph_2edge_hook_list;
1833 struct cgraph_2node_hook_list;
1834
1835 /* Map from a symbol to initialization/finalization priorities. */
1836 struct GTY(()) symbol_priority_map {
1837 priority_type init;
1838 priority_type fini;
1839 };
1840
1841 enum symtab_state
1842 {
1843 /* Frontend is parsing and finalizing functions. */
1844 PARSING,
1845 /* Callgraph is being constructed. It is safe to add new functions. */
1846 CONSTRUCTION,
1847 /* Callgraph is being streamed-in at LTO time. */
1848 LTO_STREAMING,
1849 /* Callgraph is built and early IPA passes are being run. */
1850 IPA,
1851 /* Callgraph is built and all functions are transformed to SSA form. */
1852 IPA_SSA,
1853 /* All inline decisions are done; it is now possible to remove extern inline
1854 functions and virtual call targets. */
1855 IPA_SSA_AFTER_INLINING,
1856 /* Functions are now ordered and being passed to RTL expanders. */
1857 EXPANSION,
1858 /* All cgraph expansion is done. */
1859 FINISHED
1860 };
1861
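/* A sketch of how the state is typically consulted; for example, code that
   creates new functions on the fly may check:

	if (symtab->state == CONSTRUCTION)
	  /* New functions can simply be finalized.  */;
	else
	  /* They are queued and later handled by
	     symtab->process_new_functions ().  */;
*/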
1862 struct asmname_hasher : ggc_ptr_hash <symtab_node>
1863 {
1864 typedef const_tree compare_type;
1865
1866 static hashval_t hash (symtab_node *n);
1867 static bool equal (symtab_node *n, const_tree t);
1868 };
1869
1870 class GTY((tag ("SYMTAB"))) symbol_table
1871 {
1872 public:
1873 friend class symtab_node;
1874 friend class cgraph_node;
1875 friend class cgraph_edge;
1876
1877 symbol_table (): cgraph_max_summary_uid (1)
1878 {
1879 }
1880
1881 /* Initialize callgraph dump file. */
1882 void initialize (void);
1883
1884 /* Register a top-level asm statement ASM_STR. */
1885 inline asm_node *finalize_toplevel_asm (tree asm_str);
1886
1887 /* Analyze the whole compilation unit once it is parsed completely. */
1888 void finalize_compilation_unit (void);
1889
1890 /* The C++ frontend produces same-body aliases all over the place, even before
1891 PCH gets streamed out. It relies on us linking the aliases with their
1892 functions in order to do the fixups, but ipa-ref is not PCH safe.
1893 Consequently we first produce aliases without links, but once the C++ FE is
1894 sure it will not stream PCH we build the links via this function. */
1895 void process_same_body_aliases (void);
1896
1897 /* Perform simple optimizations based on callgraph. */
1898 void compile (void);
1899
1900 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
1901 functions into the callgraph so they look like ordinary reachable
1902 functions inserted into the callgraph already at construction time. */
1903 void process_new_functions (void);
1904
1905 /* Once all functions from compilation unit are in memory, produce all clones
1906 and update all calls. We might also do this on demand if we don't want to
1907 bring all functions to memory prior to compilation, but the current WHOPR
1908 implementation does that and it is a bit easier to keep everything right
1909 in this order. */
1910 void materialize_all_clones (void);
1911
1912 /* Register a symbol NODE. */
1913 inline void register_symbol (symtab_node *node);
1914
1915 inline void
1916 clear_asm_symbols (void)
1917 {
1918 asmnodes = NULL;
1919 asm_last_node = NULL;
1920 }
1921
1922 /* Perform reachability analysis and reclaim all unreachable nodes. */
1923 bool remove_unreachable_nodes (FILE *file);
1924
1925 /* Optimization of function bodies might have rendered some variables
1926 unnecessary, so we want to avoid compiling them. Re-do
1927 reachability starting from variables that are either externally visible
1928 or were referred to from the asm output routines. */
1929 void remove_unreferenced_decls (void);
1930
1931 /* Unregister a symbol NODE. */
1932 inline void unregister (symtab_node *node);
1933
1934 /* Allocate new callgraph node and insert it into basic data structures. */
1935 cgraph_node *create_empty (void);
1936
1937 /* Release a callgraph NODE with UID and put it into the list
1938 of free nodes. */
1939 void release_symbol (cgraph_node *node, int uid);
1940
1941 /* Output all variables enqueued to be assembled. */
1942 bool output_variables (void);
1943
1944 /* Weakrefs may be associated with external decls and thus not output
1945 at expansion time. Emit all necessary aliases. */
1946 void output_weakrefs (void);
1947
1948 /* Return the first symbol in the symbol table. */
1949 inline symtab_node *first_symbol (void);
1950
1951 /* Return first assembler symbol. */
1952 inline asm_node *
1953 first_asm_symbol (void)
1954 {
1955 return asmnodes;
1956 }
1957
1958 /* Return first static symbol with definition. */
1959 inline symtab_node *first_defined_symbol (void);
1960
1961 /* Return first variable. */
1962 inline varpool_node *first_variable (void);
1963
1964 /* Return next variable after NODE. */
1965 inline varpool_node *next_variable (varpool_node *node);
1966
1967 /* Return first static variable with initializer. */
1968 inline varpool_node *first_static_initializer (void);
1969
1970 /* Return next static variable with initializer after NODE. */
1971 inline varpool_node *next_static_initializer (varpool_node *node);
1972
1973 /* Return first static variable with definition. */
1974 inline varpool_node *first_defined_variable (void);
1975
1976 /* Return next static variable with definition after NODE. */
1977 inline varpool_node *next_defined_variable (varpool_node *node);
1978
1979 /* Return first function with body defined. */
1980 inline cgraph_node *first_defined_function (void);
1981
1982 /* Return next function with body defined after NODE. */
1983 inline cgraph_node *next_defined_function (cgraph_node *node);
1984
1985 /* Return first function. */
1986 inline cgraph_node *first_function (void);
1987
1988 /* Return next function. */
1989 inline cgraph_node *next_function (cgraph_node *node);
1990
1991 /* Return first function with gimple body defined. */
1992 cgraph_node *first_function_with_gimple_body (void);
1993
1994 /* Return next function with gimple body defined after NODE. */
1995 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
1996
1997 /* Register HOOK to be called with DATA on each removed edge. */
1998 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
1999 void *data);
2000
2001 /* Remove ENTRY from the list of hooks called on removing edges. */
2002 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2003
2004 /* Register HOOK to be called with DATA on each removed node. */
2005 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2006 void *data);
2007
2008 /* Remove ENTRY from the list of hooks called on removing nodes. */
2009 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2010
2011 /* Register HOOK to be called with DATA on each removed node. */
2012 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2013 void *data);
2014
2015 /* Remove ENTRY from the list of hooks called on removing nodes. */
2016 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2017
2018 /* Register HOOK to be called with DATA on each inserted node. */
2019 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2020 void *data);
2021
2022 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2023 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2024
2025 /* Register HOOK to be called with DATA on each inserted node. */
2026 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2027 void *data);
2028
2029 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2030 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2031
2032 /* Register HOOK to be called with DATA on each duplicated edge. */
2033 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2034 void *data);
2035 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2036 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2037
2038 /* Register HOOK to be called with DATA on each duplicated node. */
2039 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2040 void *data);
2041
2042 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2043 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2044
2045 /* Call all edge removal hooks. */
2046 void call_edge_removal_hooks (cgraph_edge *e);
2047
2048 /* Call all node insertion hooks. */
2049 void call_cgraph_insertion_hooks (cgraph_node *node);
2050
2051 /* Call all node removal hooks. */
2052 void call_cgraph_removal_hooks (cgraph_node *node);
2053
2054 /* Call all node duplication hooks. */
2055 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2056
2057 /* Call all edge duplication hooks. */
2058 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2059
2060 /* Call all node removal hooks. */
2061 void call_varpool_removal_hooks (varpool_node *node);
2062
2063 /* Call all node insertion hooks. */
2064 void call_varpool_insertion_hooks (varpool_node *node);
2065
2066 /* Arrange node to be first in its entry of assembler_name_hash. */
2067 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2068
2069 /* Initialize asm name hash unless it is already initialized. */
2070 void symtab_initialize_asm_name_hash (void);
2071
2072 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2073 void change_decl_assembler_name (tree decl, tree name);
2074
2075 int cgraph_count;
2076 int cgraph_max_uid;
2077 int cgraph_max_summary_uid;
2078
2079 int edges_count;
2080 int edges_max_uid;
2081
2082 symtab_node* GTY(()) nodes;
2083 asm_node* GTY(()) asmnodes;
2084 asm_node* GTY(()) asm_last_node;
2085 cgraph_node* GTY(()) free_nodes;
2086
2087 /* Head of a linked list of unused (freed) call graph edges.
2088 Do not GTY((delete)) this list so UIDs get reliably recycled. */
2089 cgraph_edge * GTY(()) free_edges;
2090
2091 /* The order index of the next symtab node to be created. This is
2092 used so that we can sort the cgraph nodes in order by when we saw
2093 them, to support -fno-toplevel-reorder. */
2094 int order;
2095
2096 /* Set when whole unit has been analyzed so we can access global info. */
2097 bool global_info_ready;
2098 /* What state callgraph is in right now. */
2099 enum symtab_state state;
2100 /* Set when the cgraph is fully built and the basic flags are computed. */
2101 bool function_flags_ready;
2102
2103 bool cpp_implicit_aliases_done;
2104
2105 /* Hash table used to hold sections. */
2106 hash_table<section_name_hasher> *GTY(()) section_hash;
2107
2108 /* Hash table used to convert assembler names into nodes. */
2109 hash_table<asmname_hasher> *assembler_name_hash;
2110
2111 /* Hash table used to hold init priorities. */
2112 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2113
2114 FILE* GTY ((skip)) dump_file;
2115
2116 private:
2117 /* Allocate new callgraph node. */
2118 inline cgraph_node * allocate_cgraph_symbol (void);
2119
2120 /* Allocate a cgraph_edge structure and fill it with data according to the
2121 parameters of which only CALLEE can be NULL (when creating an indirect call
2122 edge). */
2123 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2124 gcall *call_stmt, gcov_type count, int freq,
2125 bool indir_unknown_callee);
2126
2127 /* Put the edge onto the free list. */
2128 void free_edge (cgraph_edge *e);
2129
2130 /* Insert NODE to assembler name hash. */
2131 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2132
2133 /* Remove NODE from assembler name hash. */
2134 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2135
2136 /* Hash asmnames ignoring the user specified marks. */
2137 static hashval_t decl_assembler_name_hash (const_tree asmname);
2138
2139 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2140 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2141
2142 friend struct asmname_hasher;
2143
2144 /* List of hooks triggered when an edge is removed. */
2145 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2146 /* List of hooks triggered when a cgraph node is removed. */
2147 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2148 /* List of hooks triggered when an edge is duplicated. */
2149 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2150 /* List of hooks triggered when a node is duplicated. */
2151 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2152 /* List of hooks triggered when a function is inserted. */
2153 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2154 /* List of hooks triggered when a variable is inserted. */
2155 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2156 /* List of hooks triggered when a node is removed. */
2157 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2158 };
2159
2160 extern GTY(()) symbol_table *symtab;
2161
2162 extern vec<cgraph_node *> cgraph_new_nodes;
2163
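/* A sketch of the hook machinery declared in symbol_table, assuming
   MY_EDGE_REMOVAL_HOOK is a function matching cgraph_edge_hook and
   MY_DATA is pass-local state (both names are illustrative):

	static cgraph_edge_hook_list *hook_holder;

	void
	register_my_hooks (void)
	{
	  hook_holder
	    = symtab->add_edge_removal_hook (my_edge_removal_hook, my_data);
	}

	void
	unregister_my_hooks (void)
	{
	  symtab->remove_edge_removal_hook (hook_holder);
	  hook_holder = NULL;
	}
*/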
2164 inline hashval_t
2165 asmname_hasher::hash (symtab_node *n)
2166 {
2167 return symbol_table::decl_assembler_name_hash
2168 (DECL_ASSEMBLER_NAME (n->decl));
2169 }
2170
2171 inline bool
2172 asmname_hasher::equal (symtab_node *n, const_tree t)
2173 {
2174 return symbol_table::decl_assembler_name_equal (n->decl, t);
2175 }
2176
2177 /* In cgraph.c */
2178 void cgraph_c_finalize (void);
2179 void release_function_body (tree);
2180 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2181
2182 void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
2183 bool cgraph_function_possibly_inlined_p (tree);
2184
2185 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2186 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2187
2188 extern bool gimple_check_call_matching_types (gimple, tree, bool);
2189
2190 /* In cgraphunit.c */
2191 void cgraphunit_c_finalize (void);
2192
2193 /* Initialize data structures so DECL is a function in lowered gimple form.
2194 IN_SSA is true if the gimple is in SSA. */
2195 basic_block init_lowered_empty_function (tree, bool, gcov_type);
2196
2197 /* In cgraphclones.c */
2198
2199 tree clone_function_name_1 (const char *, const char *);
2200 tree clone_function_name (tree decl, const char *);
2201
2202 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2203 bool, bitmap, bool, bitmap, basic_block);
2204
2205 /* In cgraphbuild.c */
2206 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2207 void record_references_in_initializer (tree, bool);
2208
2209 /* In ipa.c */
2210 void cgraph_build_static_cdtor (char which, tree body, int priority);
2211 bool ipa_discover_readonly_nonaddressable_vars (void);
2212
2213 /* In varpool.c */
2214 tree ctor_for_folding (tree);
2215
2216 /* In tree-chkp.c */
2217 extern bool chkp_function_instrumented_p (tree fndecl);
2218
2219 /* Return true when the symbol is a real symbol, i.e. it is not an inline
2220 clone or an abstract function kept for debug info purposes only. */
2221 inline bool
2222 symtab_node::real_symbol_p (void)
2223 {
2224 cgraph_node *cnode;
2225
2226 if (DECL_ABSTRACT_P (decl))
2227 return false;
2228 if (!is_a <cgraph_node *> (this))
2229 return true;
2230 cnode = dyn_cast <cgraph_node *> (this);
2231 if (cnode->global.inlined_to)
2232 return false;
2233 return true;
2234 }
2235
2236 /* Return true if DECL should have entry in symbol table if used.
2237 Those are functions and static & external variables. */
2238
2239 static inline bool
2240 decl_in_symtab_p (const_tree decl)
2241 {
2242 return (TREE_CODE (decl) == FUNCTION_DECL
2243 || (TREE_CODE (decl) == VAR_DECL
2244 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2245 }
2246
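/* A small sketch: before looking a declaration up in the symbol table,
   callers usually guard on decl_in_symtab_p (DECL is any declaration tree):

	if (decl_in_symtab_p (decl))
	  if (symtab_node *snode = symtab_node::get (decl))
	    /* ... use SNODE ...  */;
*/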
2247 inline bool
2248 symtab_node::in_same_comdat_group_p (symtab_node *target)
2249 {
2250 symtab_node *source = this;
2251
2252 if (cgraph_node *cn = dyn_cast <cgraph_node *> (source))
2253 {
2254 if (cn->global.inlined_to)
2255 source = cn->global.inlined_to;
2256 }
2257 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2258 {
2259 if (cn->global.inlined_to)
2260 target = cn->global.inlined_to;
2261 }
2262
2263 return source->get_comdat_group () == target->get_comdat_group ();
2264 }
2265
2266 /* Return node that alias is aliasing. */
2267
2268 inline symtab_node *
2269 symtab_node::get_alias_target (void)
2270 {
2271 ipa_ref *ref = NULL;
2272 iterate_reference (0, ref);
2273 if (ref->use == IPA_REF_CHKP)
2274 iterate_reference (1, ref);
2275 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2276 return ref->referred;
2277 }
2278
2279 /* Return the next symbol with a definition after this node. */
2280
2281 inline symtab_node *
2282 symtab_node::next_defined_symbol (void)
2283 {
2284 symtab_node *node1 = next;
2285
2286 for (; node1; node1 = node1->next)
2287 if (node1->definition)
2288 return node1;
2289
2290 return NULL;
2291 }
2292
2293 /* Return the I-th reference in the reference list; REF is also set to it. */
2294
2295 inline ipa_ref *
2296 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2297 {
2298 vec_safe_iterate (ref_list.references, i, &ref);
2299
2300 return ref;
2301 }
2302
2303 /* Return the I-th referring item in the list; REF is also set to it. */
2304
2305 inline ipa_ref *
2306 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2307 {
2308 ref_list.referring.iterate (i, &ref);
2309
2310 return ref;
2311 }
2312
2313 /* Return the I-th referring alias in the list; REF is also set to it. */
2314
2315 inline ipa_ref *
2316 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2317 {
2318 ref_list.referring.iterate (i, &ref);
2319
2320 if (ref && ref->use != IPA_REF_ALIAS)
2321 return NULL;
2322
2323 return ref;
2324 }
2325
2326 /* Return true if list contains an alias. */
2327
2328 inline bool
2329 symtab_node::has_aliases_p (void)
2330 {
2331 ipa_ref *ref = NULL;
2332
2333 return (iterate_direct_aliases (0, ref) != NULL);
2334 }
2335
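/* The canonical iteration pattern built on the accessors above, assuming
   NODE is a symtab_node *:

	ipa_ref *ref;
	for (unsigned i = 0; node->iterate_reference (i, ref); i++)
	  if (ref->use == IPA_REF_ADDR)
	    /* ... the address of REF->REFERRED is taken here ...  */;
*/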
2336 /* Return true when RESOLUTION indicates that the linker will use
2337 the symbol from non-LTO object files. */
2338
2339 inline bool
2340 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2341 {
2342 return (resolution == LDPR_PREVAILING_DEF
2343 || resolution == LDPR_PREEMPTED_REG
2344 || resolution == LDPR_RESOLVED_EXEC
2345 || resolution == LDPR_RESOLVED_DYN);
2346 }
2347
2348 /* Return true when symtab_node is known to be used from other (non-LTO)
2349 object file. Known only when doing LTO via linker plugin. */
2350
2351 inline bool
2352 symtab_node::used_from_object_file_p (void)
2353 {
2354 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2355 return false;
2356 if (resolution_used_from_other_file_p (resolution))
2357 return true;
2358 return false;
2359 }
2360
2361 /* Return varpool node for given symbol and check it is a variable. */
2362
2363 inline varpool_node *
2364 varpool_node::get (const_tree decl)
2365 {
2366 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2367 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2368 }
2369
2370 /* Register a symbol NODE. */
2371
2372 inline void
2373 symbol_table::register_symbol (symtab_node *node)
2374 {
2375 node->next = nodes;
2376 node->previous = NULL;
2377
2378 if (nodes)
2379 nodes->previous = node;
2380 nodes = node;
2381
2382 node->order = order++;
2383 }
2384
2385 /* Register a top-level asm statement ASM_STR. */
2386
2387 asm_node *
2388 symbol_table::finalize_toplevel_asm (tree asm_str)
2389 {
2390 asm_node *node;
2391
2392 node = ggc_cleared_alloc<asm_node> ();
2393 node->asm_str = asm_str;
2394 node->order = order++;
2395 node->next = NULL;
2396
2397 if (asmnodes == NULL)
2398 asmnodes = node;
2399 else
2400 asm_last_node->next = node;
2401
2402 asm_last_node = node;
2403 return node;
2404 }
2405
2406 /* Unregister a symbol NODE. */
2407 inline void
2408 symbol_table::unregister (symtab_node *node)
2409 {
2410 if (node->previous)
2411 node->previous->next = node->next;
2412 else
2413 nodes = node->next;
2414
2415 if (node->next)
2416 node->next->previous = node->previous;
2417
2418 node->next = NULL;
2419 node->previous = NULL;
2420 }
2421
2422 /* Release a callgraph NODE with UID and put it into the list of free nodes. */
2423
2424 inline void
2425 symbol_table::release_symbol (cgraph_node *node, int uid)
2426 {
2427 cgraph_count--;
2428
2429 /* Clear out the node to NULL all pointers and add the node to the free
2430 list. */
2431 memset (node, 0, sizeof (*node));
2432 node->type = SYMTAB_FUNCTION;
2433 node->uid = uid;
2434 SET_NEXT_FREE_NODE (node, free_nodes);
2435 free_nodes = node;
2436 }
2437
2438 /* Allocate new callgraph node. */
2439
2440 inline cgraph_node *
2441 symbol_table::allocate_cgraph_symbol (void)
2442 {
2443 cgraph_node *node;
2444
2445 if (free_nodes)
2446 {
2447 node = free_nodes;
2448 free_nodes = NEXT_FREE_NODE (node);
2449 }
2450 else
2451 {
2452 node = ggc_cleared_alloc<cgraph_node> ();
2453 node->uid = cgraph_max_uid++;
2454 }
2455
2456 node->summary_uid = cgraph_max_summary_uid++;
2457 return node;
2458 }
2459
2460
2461 /* Return the first symbol in the symbol table. */
2462 inline symtab_node *
2463 symbol_table::first_symbol (void)
2464 {
2465 return nodes;
2466 }
2467
2468 /* Walk all symbols. */
2469 #define FOR_EACH_SYMBOL(node) \
2470 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2471
2472 /* Return first static symbol with definition. */
2473 inline symtab_node *
2474 symbol_table::first_defined_symbol (void)
2475 {
2476 symtab_node *node;
2477
2478 for (node = nodes; node; node = node->next)
2479 if (node->definition)
2480 return node;
2481
2482 return NULL;
2483 }
2484
2485 /* Walk all symbols with definitions in current unit. */
2486 #define FOR_EACH_DEFINED_SYMBOL(node) \
2487 for ((node) = symtab->first_defined_symbol (); (node); \
2488 (node) = node->next_defined_symbol ())
2489
2490 /* Return first variable. */
2491 inline varpool_node *
2492 symbol_table::first_variable (void)
2493 {
2494 symtab_node *node;
2495 for (node = nodes; node; node = node->next)
2496 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2497 return vnode;
2498 return NULL;
2499 }
2500
2501 /* Return next variable after NODE. */
2502 inline varpool_node *
2503 symbol_table::next_variable (varpool_node *node)
2504 {
2505 symtab_node *node1 = node->next;
2506 for (; node1; node1 = node1->next)
2507 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2508 return vnode1;
2509 return NULL;
2510 }
2511 /* Walk all variables. */
2512 #define FOR_EACH_VARIABLE(node) \
2513 for ((node) = symtab->first_variable (); \
2514 (node); \
2515 (node) = symtab->next_variable ((node)))
2516
2517 /* Return first static variable with initializer. */
2518 inline varpool_node *
2519 symbol_table::first_static_initializer (void)
2520 {
2521 symtab_node *node;
2522 for (node = nodes; node; node = node->next)
2523 {
2524 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2525 if (vnode && DECL_INITIAL (node->decl))
2526 return vnode;
2527 }
2528 return NULL;
2529 }
2530
2531 /* Return next static variable with initializer after NODE. */
2532 inline varpool_node *
2533 symbol_table::next_static_initializer (varpool_node *node)
2534 {
2535 symtab_node *node1 = node->next;
2536 for (; node1; node1 = node1->next)
2537 {
2538 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2539 if (vnode1 && DECL_INITIAL (node1->decl))
2540 return vnode1;
2541 }
2542 return NULL;
2543 }
2544
2545 /* Walk all static variables with initializer set. */
2546 #define FOR_EACH_STATIC_INITIALIZER(node) \
2547 for ((node) = symtab->first_static_initializer (); (node); \
2548 (node) = symtab->next_static_initializer (node))
2549
2550 /* Return first static variable with definition. */
2551 inline varpool_node *
2552 symbol_table::first_defined_variable (void)
2553 {
2554 symtab_node *node;
2555 for (node = nodes; node; node = node->next)
2556 {
2557 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2558 if (vnode && vnode->definition)
2559 return vnode;
2560 }
2561 return NULL;
2562 }
2563
2564 /* Return next static variable with definition after NODE. */
2565 inline varpool_node *
2566 symbol_table::next_defined_variable (varpool_node *node)
2567 {
2568 symtab_node *node1 = node->next;
2569 for (; node1; node1 = node1->next)
2570 {
2571 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2572 if (vnode1 && vnode1->definition)
2573 return vnode1;
2574 }
2575 return NULL;
2576 }
2577 /* Walk all variables with definitions in current unit. */
2578 #define FOR_EACH_DEFINED_VARIABLE(node) \
2579 for ((node) = symtab->first_defined_variable (); (node); \
2580 (node) = symtab->next_defined_variable (node))
2581
2582 /* Return first function with body defined. */
2583 inline cgraph_node *
2584 symbol_table::first_defined_function (void)
2585 {
2586 symtab_node *node;
2587 for (node = nodes; node; node = node->next)
2588 {
2589 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2590 if (cn && cn->definition)
2591 return cn;
2592 }
2593 return NULL;
2594 }
2595
2596 /* Return next function with body defined after NODE. */
2597 inline cgraph_node *
2598 symbol_table::next_defined_function (cgraph_node *node)
2599 {
2600 symtab_node *node1 = node->next;
2601 for (; node1; node1 = node1->next)
2602 {
2603 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2604 if (cn1 && cn1->definition)
2605 return cn1;
2606 }
2607 return NULL;
2608 }
2609
2610 /* Walk all functions with body defined. */
2611 #define FOR_EACH_DEFINED_FUNCTION(node) \
2612 for ((node) = symtab->first_defined_function (); (node); \
2613 (node) = symtab->next_defined_function ((node)))
2614
2615 /* Return first function. */
2616 inline cgraph_node *
2617 symbol_table::first_function (void)
2618 {
2619 symtab_node *node;
2620 for (node = nodes; node; node = node->next)
2621 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2622 return cn;
2623 return NULL;
2624 }
2625
2626 /* Return next function. */
2627 inline cgraph_node *
2628 symbol_table::next_function (cgraph_node *node)
2629 {
2630 symtab_node *node1 = node->next;
2631 for (; node1; node1 = node1->next)
2632 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2633 return cn1;
2634 return NULL;
2635 }
2636
2637 /* Return first function with gimple body defined. */
2638 inline cgraph_node *
2639 symbol_table::first_function_with_gimple_body (void)
2640 {
2641 symtab_node *node;
2642 for (node = nodes; node; node = node->next)
2643 {
2644 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2645 if (cn && cn->has_gimple_body_p ())
2646 return cn;
2647 }
2648 return NULL;
2649 }
2650
2651 /* Return next function with gimple body defined after NODE. */
2652 inline cgraph_node *
2653 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2654 {
2655 symtab_node *node1 = node->next;
2656 for (; node1; node1 = node1->next)
2657 {
2658 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2659 if (cn1 && cn1->has_gimple_body_p ())
2660 return cn1;
2661 }
2662 return NULL;
2663 }
2664
2665 /* Walk all functions. */
2666 #define FOR_EACH_FUNCTION(node) \
2667 for ((node) = symtab->first_function (); (node); \
2668 (node) = symtab->next_function ((node)))
2669
2670 /* Return true when callgraph node is a function with Gimple body defined
2671 in the current unit. Functions can also be defined externally or they
2672 can be thunks with no Gimple representation.
2673
2674 Note that at WPA stage, the function body may not be present in memory. */
2675
2676 inline bool
2677 cgraph_node::has_gimple_body_p (void)
2678 {
2679 return definition && !thunk.thunk_p && !alias;
2680 }
2681
2682 /* Walk all functions with body defined. */
2683 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2684 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2685 (node) = symtab->next_function_with_gimple_body (node))
2686
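/* A sketch of a straightforward IPA walk over the unit using these macros
   (the body comments are placeholders):

	cgraph_node *node;
	FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
	  {
	    /* NODE->DECL has a gimple body in this unit.  */
	  }

	varpool_node *vnode;
	FOR_EACH_DEFINED_VARIABLE (vnode)
	  {
	    /* VNODE is defined in this unit.  */
	  }
*/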
2687 /* Uniquize all constants that appear in memory.
2688 Each constant in memory thus far output is recorded
2689 in `const_desc_table'. */
2690
2691 struct GTY((for_user)) constant_descriptor_tree {
2692 /* A MEM for the constant. */
2693 rtx rtl;
2694
2695 /* The value of the constant. */
2696 tree value;
2697
2698 /* Hash of value. Computing the hash from value each time
2699 hashfn is called can't work properly, as that means recursive
2700 use of the hash table during hash table expansion. */
2701 hashval_t hash;
2702 };
2703
2704 /* Return true when the function is only called directly or via its aliases;
2705 i.e. it is not externally visible, its address was not taken and
2706 it is not used in any other non-standard way. */
2707
2708 inline bool
2709 cgraph_node::only_called_directly_or_aliased_p (void)
2710 {
2711 gcc_assert (!global.inlined_to);
2712 return (!force_output && !address_taken
2713 && !used_from_other_partition
2714 && !DECL_VIRTUAL_P (decl)
2715 && !DECL_STATIC_CONSTRUCTOR (decl)
2716 && !DECL_STATIC_DESTRUCTOR (decl)
2717 && !used_from_object_file_p ()
2718 && !externally_visible);
2719 }
2720
2721 /* Return true when the function can be removed from the callgraph
2722 if all direct calls and references are eliminated. */
2723
2724 inline bool
2725 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2726 {
2727 gcc_checking_assert (!global.inlined_to);
2728 /* Instrumentation clones should not be removed before
2729 instrumentation happens. New callers may appear after
2730 instrumentation. */
2731 if (instrumentation_clone
2732 && !chkp_function_instrumented_p (decl))
2733 return false;
2734 /* Extern inlines can always go, we will use the external definition. */
2735 if (DECL_EXTERNAL (decl))
2736 return true;
2737 /* When the function is needed, we cannot remove it. */
2738 if (force_output || used_from_other_partition)
2739 return false;
2740 if (DECL_STATIC_CONSTRUCTOR (decl)
2741 || DECL_STATIC_DESTRUCTOR (decl))
2742 return false;
2743 /* Only COMDAT functions can be removed if externally visible. */
2744 if (externally_visible
2745 && (!DECL_COMDAT (decl)
2746 || forced_by_abi
2747 || used_from_object_file_p ()))
2748 return false;
2749 return true;
2750 }
2751
2752 /* Return true when the variable can be removed from the variable pool
2753 if all references to it are eliminated. */
2754
2755 inline bool
2756 varpool_node::can_remove_if_no_refs_p (void)
2757 {
2758 if (DECL_EXTERNAL (decl))
2759 return true;
2760 return (!force_output && !used_from_other_partition
2761 && ((DECL_COMDAT (decl)
2762 && !forced_by_abi
2763 && !used_from_object_file_p ())
2764 || !externally_visible
2765 || DECL_HAS_VALUE_EXPR_P (decl)));
2766 }
2767
2768 /* Return true when all references to variable must be visible in ipa_ref_list.
2769 i.e. if the variable is not externally visible or not used in some magic
2770 way (asm statement or such).
2771 The magic uses are all summarized in force_output flag. */
2772
2773 inline bool
2774 varpool_node::all_refs_explicit_p ()
2775 {
2776 return (definition
2777 && !externally_visible
2778 && !used_from_other_partition
2779 && !force_output);
2780 }
2781
2782 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2783 {
2784 static hashval_t hash (constant_descriptor_tree *);
2785 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2786 };
2787
2788 /* Constant pool accessor function. */
2789 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2790
2791 /* Return node that alias is aliasing. */
2792
2793 inline cgraph_node *
2794 cgraph_node::get_alias_target (void)
2795 {
2796 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2797 }
2798
2799 /* Return node that alias is aliasing. */
2800
2801 inline varpool_node *
2802 varpool_node::get_alias_target (void)
2803 {
2804 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2805 }
2806
2807 /* Walk the alias chain to return the symbol NODE is alias of.
2808 If NODE is not an alias, return NODE.
2809 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2810
2811 inline symtab_node *
2812 symtab_node::ultimate_alias_target (enum availability *availability)
2813 {
2814 if (!alias)
2815 {
2816 if (availability)
2817 *availability = get_availability ();
2818 return this;
2819 }
2820
2821 return ultimate_alias_target_1 (availability);
2822 }
2823
2824 /* Given a function symbol, walk the alias chain to return the function
2825 it is an alias of. Do not walk through thunks.
2826 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2827
2828 inline cgraph_node *
2829 cgraph_node::ultimate_alias_target (enum availability *availability)
2830 {
2831 cgraph_node *n = dyn_cast <cgraph_node *>
2832 (symtab_node::ultimate_alias_target (availability));
2833 if (!n && availability)
2834 *availability = AVAIL_NOT_AVAILABLE;
2835 return n;
2836 }
2837
2838 /* For a given variable pool node, walk the alias chain to return the
2839 variable it is an alias of. Do not walk through thunks.
2840 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2841
2842 inline varpool_node *
2843 varpool_node::ultimate_alias_target (availability *availability)
2844 {
2845 varpool_node *n = dyn_cast <varpool_node *>
2846 (symtab_node::ultimate_alias_target (availability));
2847
2848 if (!n && availability)
2849 *availability = AVAIL_NOT_AVAILABLE;
2850 return n;
2851 }
2852
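/* A sketch of the usual pattern: resolve the alias chain together with its
   availability before trusting the body, e.g. for a call edge E:

	enum availability avail;
	cgraph_node *target = e->callee->ultimate_alias_target (&avail);
	if (target && avail >= AVAIL_AVAILABLE)
	  /* TARGET's body will be used in the final program.  */;
*/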
2853 /* Set callee N of call graph edge and add it to the corresponding set of
2854 callers. */
2855
2856 inline void
2857 cgraph_edge::set_callee (cgraph_node *n)
2858 {
2859 prev_caller = NULL;
2860 if (n->callers)
2861 n->callers->prev_caller = this;
2862 next_caller = n->callers;
2863 n->callers = this;
2864 callee = n;
2865 }
2866
2867 /* Redirect callee of the edge to N. The function does not update underlying
2868 call expression. */
2869
2870 inline void
2871 cgraph_edge::redirect_callee (cgraph_node *n)
2872 {
2873 /* Remove from callers list of the current callee. */
2874 remove_callee ();
2875
2876 /* Insert to callers list of the new callee. */
2877 set_callee (n);
2878 }
2879
2880 /* Return true when the edge represents a direct recursion. */
2881
2882 inline bool
2883 cgraph_edge::recursive_p (void)
2884 {
2885 cgraph_node *c = callee->ultimate_alias_target ();
2886 if (caller->global.inlined_to)
2887 return caller->global.inlined_to->decl == c->decl;
2888 else
2889 return caller->decl == c->decl;
2890 }
2891
2892 /* Remove the edge from the list of the callers of the callee. */
2893
2894 inline void
2895 cgraph_edge::remove_callee (void)
2896 {
2897 gcc_assert (!indirect_unknown_callee);
2898 if (prev_caller)
2899 prev_caller->next_caller = next_caller;
2900 if (next_caller)
2901 next_caller->prev_caller = prev_caller;
2902 if (!prev_caller)
2903 callee->callers = next_caller;
2904 }
2905
2906 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
2907 static inline bool
2908 decl_is_tm_clone (const_tree fndecl)
2909 {
2910 cgraph_node *n = cgraph_node::get (fndecl);
2911 if (n)
2912 return n->tm_clone;
2913 return false;
2914 }
2915
2916 /* Indicate that a node is needed, i.e. reachable via some
2917 external means. */
2918
2919 inline void
2920 cgraph_node::mark_force_output (void)
2921 {
2922 force_output = 1;
2923 gcc_checking_assert (!global.inlined_to);
2924 }
2925
2926 /* Return true if function should be optimized for size. */
2927
2928 inline bool
2929 cgraph_node::optimize_for_size_p (void)
2930 {
2931 if (opt_for_fn (decl, optimize_size))
2932 return true;
2933 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2934 return true;
2935 else
2936 return false;
2937 }
2938
2939 /* Return symtab_node for NODE or create one if it is not present
2940 in symtab. */
2941
2942 inline symtab_node *
2943 symtab_node::get_create (tree node)
2944 {
2945 if (TREE_CODE (node) == VAR_DECL)
2946 return varpool_node::get_create (node);
2947 else
2948 return cgraph_node::get_create (node);
2949 }
2950
2951 /* Return availability of NODE. */
2952
2953 inline enum availability
2954 symtab_node::get_availability (void)
2955 {
2956 if (is_a <cgraph_node *> (this))
2957 return dyn_cast <cgraph_node *> (this)->get_availability ();
2958 else
2959 return dyn_cast <varpool_node *> (this)->get_availability ();
2960 }
2961
2962 /* Call callback on symtab node and aliases associated with this node.
2963 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2964 skipped. */
2965
2966 inline bool
2967 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
2968 void *),
2969 void *data,
2970 bool include_overwritable)
2971 {
2972 if (callback (this, data))
2973 return true;
2974 if (has_aliases_p ())
2975 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
2976 return false;
2977 }
2978
2979 /* Call callback on function and aliases associated to the function.
2980 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2981 skipped. */
2982
2983 inline bool
2984 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
2985 void *),
2986 void *data,
2987 bool include_overwritable)
2988 {
2989 if (callback (this, data))
2990 return true;
2991 if (has_aliases_p ())
2992 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
2993 return false;
2994 }
2995
2996 /* Call callback on varpool symbol and aliases associated with it.
2997 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2998 skipped. */
2999
3000 inline bool
3001 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3002 void *),
3003 void *data,
3004 bool include_overwritable)
3005 {
3006 if (callback (this, data))
3007 return true;
3008 if (has_aliases_p ())
3009 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3010 return false;
3011 }
3012
3013 /* Return true if the reference may be used in address compare. */
3014
3015 inline bool
3016 ipa_ref::address_matters_p ()
3017 {
3018 if (use != IPA_REF_ADDR)
3019 return false;
3020 /* Addresses taken from virtual tables are never compared. */
3021 if (is_a <varpool_node *> (referring)
3022 && DECL_VIRTUAL_P (referring->decl))
3023 return false;
3024 return referred->address_can_be_compared_p ();
3025 }
3026
3027 /* Build polymorphic call context for indirect call E. */
3028
3029 inline
3030 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3031 {
3032 gcc_checking_assert (e->indirect_info->polymorphic);
3033 *this = e->indirect_info->context;
3034 }
3035
3036 /* Build empty "I know nothing" context. */
3037
3038 inline
3039 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3040 {
3041 clear_speculation ();
3042 clear_outer_type ();
3043 invalid = false;
3044 }
3045
3046 /* Make context non-speculative. */
3047
3048 inline void
3049 ipa_polymorphic_call_context::clear_speculation ()
3050 {
3051 speculative_outer_type = NULL;
3052 speculative_offset = 0;
3053 speculative_maybe_derived_type = false;
3054 }
3055
3056 /* Produce a context specifying all derived types of OTR_TYPE. If OTR_TYPE is
3057 NULL, the context is set to the dummy "I know nothing" setting. */
3058
3059 inline void
3060 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3061 {
3062 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3063 offset = 0;
3064 maybe_derived_type = true;
3065 maybe_in_construction = true;
3066 dynamic = true;
3067 }
3068
3069 /* Adjust all offsets in contexts by OFF bits. */
3070
3071 inline void
3072 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3073 {
3074 if (outer_type)
3075 offset += off;
3076 if (speculative_outer_type)
3077 speculative_offset += off;
3078 }
3079
3080 /* Return TRUE if context is fully useless. */
3081
3082 inline bool
3083 ipa_polymorphic_call_context::useless_p () const
3084 {
3085 return (!outer_type && !speculative_outer_type);
3086 }
3087
3088 /* Return true if NODE is local. Instrumentation clones are counted as local
3089 only when the original function is local. */
3090
3091 static inline bool
3092 cgraph_local_p (cgraph_node *node)
3093 {
3094 if (!node->instrumentation_clone || !node->instrumented_version)
3095 return node->local.local;
3096
3097 return node->local.local && node->instrumented_version->local.local;
3098 }
3099
3100 /* When using fprintf (or similar), problems can arise with
3101 transient generated strings. Many string-generation APIs
3102 only support one result being alive at once (e.g. by
3103 returning a pointer to a statically-allocated buffer).
3104
3105 If there is more than one generated string within one
3106 fprintf call, the first string gets evicted or overwritten
3107 by the second, before fprintf is fully evaluated.
3108 See e.g. PR/53136.
3109
3110 This function provides a workaround for this, by providing
3111 a simple way to create copies of these transient strings,
3112 without the need to have explicit cleanup:
3113
3114 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3115 xstrdup_for_dump (EXPR_1),
3116 xstrdup_for_dump (EXPR_2));
3117
3118 This is actually a simple wrapper around ggc_strdup, but
3119 the name documents the intent. We require that no GC can occur
3120 within the fprintf call. */
3121
3122 static inline const char *
3123 xstrdup_for_dump (const char *transient_str)
3124 {
3125 return ggc_strdup (transient_str);
3126 }
3127
3128 #endif /* GCC_CGRAPH_H */