1 /* Callgraph handling code.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "hash-map.h"
25 #include "is-a.h"
26 #include "plugin-api.h"
27 #include "vec.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "ipa-ref.h"
31 #include "dumpfile.h"
32
33 /* Symbol table consists of functions and variables.
34 TODO: add labels and CONST_DECLs. */
35 enum symtab_type
36 {
37 SYMTAB_SYMBOL,
38 SYMTAB_FUNCTION,
39 SYMTAB_VARIABLE
40 };
41
42 /* Section names are stored as reference counted strings in GGC safe hashtable
43 (to make them survive through PCH). */
44
45 struct GTY((for_user)) section_hash_entry_d
46 {
47 int ref_count;
48	  char *name;  /* As long as this datastructure stays in GGC, we cannot put
49			  the string at the tail of the structure or GGC dies in a
50			  horrible way.  */
51 };
52
53 typedef struct section_hash_entry_d section_hash_entry;
54
55 struct section_name_hasher : ggc_hasher<section_hash_entry *>
56 {
57 typedef const char *compare_type;
58
59 static hashval_t hash (section_hash_entry *);
60 static bool equal (section_hash_entry *, const char *);
61 };
62
63 enum availability
64 {
65 /* Not yet set by cgraph_function_body_availability. */
66 AVAIL_UNSET,
67 /* Function body/variable initializer is unknown. */
68 AVAIL_NOT_AVAILABLE,
69 /* Function body/variable initializer is known but might be replaced
70 by a different one from other compilation unit and thus needs to
71 be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
72 arbitrary side effects on escaping variables and functions, while
73 like AVAILABLE it might access static variables. */
74 AVAIL_INTERPOSABLE,
75 /* Function body/variable initializer is known and will be used in final
76 program. */
77 AVAIL_AVAILABLE,
78	  /* Function body/variable initializer is known and all its uses are
79	     explicitly visible within current unit (i.e. its address is never taken
80	     and it is not exported to other units). Currently used only for functions.  */
81 AVAIL_LOCAL
82 };
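/* Usage sketch (illustrative, not part of the interface above): an IPA
   analysis typically resolves aliases first and then checks how far the
   body/initializer it sees can be trusted; the variable names here are
   hypothetical.

     enum availability avail;
     symtab_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       {
	 // The definition seen here is the one used in the final program,
	 // so optimization decisions may be based on it.
       }
     else if (avail == AVAIL_INTERPOSABLE)
       {
	 // The definition may be replaced at link time; only conclusions
	 // that hold for any interposed definition are safe.
       }  */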
83
84 /* Classification of symbols WRT partitioning. */
85 enum symbol_partitioning_class
86 {
87 /* External declarations are ignored by partitioning algorithms and they are
88 added into the boundary later via compute_ltrans_boundary. */
89 SYMBOL_EXTERNAL,
90	  /* Partitioned symbols are put into one of the partitions.  */
91 SYMBOL_PARTITION,
92 /* Duplicated symbols (such as comdat or constant pool references) are
93 copied into every node needing them via add_symbol_to_partition. */
94 SYMBOL_DUPLICATE
95 };
96
97 /* Base of all entries in the symbol table.
98	   The symtab_node is inherited by cgraph and varpool nodes.  */
99 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
100 chain_next ("%h.next"), chain_prev ("%h.previous")))
101 symtab_node
102 {
103 public:
104 /* Return name. */
105 const char *name () const;
106
107 /* Return asm name. */
108 const char * asm_name () const;
109
110 /* Add node into symbol table. This function is not used directly, but via
111 cgraph/varpool node creation routines. */
112 void register_symbol (void);
113
114 /* Remove symbol from symbol table. */
115 void remove (void);
116
117 /* Dump symtab node to F. */
118 void dump (FILE *f);
119
120 /* Dump symtab node to stderr. */
121 void DEBUG_FUNCTION debug (void);
122
123 /* Verify consistency of node. */
124 void DEBUG_FUNCTION verify (void);
125
126	  /* Create and return an ipa reference from this symtab_node to
127	     REFERRED_NODE.  USE_TYPE specifies the type of the use.  */
129 ipa_ref *create_reference (symtab_node *referred_node,
130 enum ipa_ref_use use_type);
131
132	  /* Create and return an ipa reference from this symtab_node to
133	     REFERRED_NODE.  USE_TYPE specifies the type of the use and STMT
134	     the statement (if it exists).  */
135 ipa_ref *create_reference (symtab_node *referred_node,
136 enum ipa_ref_use use_type, gimple stmt);
137
138	  /* If VAL is a reference to a function or a variable, add a reference from
139	     this symtab_node to the corresponding symbol table node.  USE_TYPE
140	     specifies the type of the use and STMT the statement (if it exists).
141	     Return the new reference or NULL if none was created.  */
142 ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
143 gimple stmt);
144
145 /* Clone all references from symtab NODE to this symtab_node. */
146 void clone_references (symtab_node *node);
147
148	  /* Clone all referring items from symtab NODE to this symtab_node.  */
152 void clone_referring (symtab_node *node);
153
154 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
155 ipa_ref *clone_reference (ipa_ref *ref, gimple stmt);
156
157 /* Find the structure describing a reference to REFERRED_NODE
158 and associated with statement STMT. */
159 ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
160 unsigned int lto_stmt_uid);
161
162 /* Remove all references that are associated with statement STMT. */
163 void remove_stmt_references (gimple stmt);
164
165	  /* Remove all stmt references in non-speculative references.
166	     Those are not maintained during inlining & cloning.
167	     The exception is speculative references, which are updated along
168	     with the callgraph edges associated with them.  */
169 void clear_stmts_in_references (void);
170
171 /* Remove all references in ref list. */
172 void remove_all_references (void);
173
174 /* Remove all referring items in ref list. */
175 void remove_all_referring (void);
176
177 /* Dump references in ref list to FILE. */
178 void dump_references (FILE *file);
179
180 /* Dump referring in list to FILE. */
181 void dump_referring (FILE *);
182
183 /* Get number of references for this node. */
184 inline unsigned num_references (void)
185 {
186 return ref_list.references ? ref_list.references->length () : 0;
187 }
188
189	  /* Return the I-th reference in the list; REF is also set.  */
190 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
191
192	  /* Return the I-th referring item in the list; REF is also set.  */
193 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
194
195	  /* Return the I-th direct alias in the referring list; REF is also set.  */
196 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
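/* Iteration sketch (illustrative): the three accessors above are meant to
   be driven from a counting loop; NODE and REF are hypothetical names.

     ipa_ref *ref;
     for (unsigned i = 0; node->iterate_reference (i, ref); i++)
       {
	 // REF now describes the I-th entity referenced by NODE.
       }  */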
197
198	  /* Return true if this symtab node and TARGET represent
199	     semantically equivalent symbols.  */
200 bool semantically_equivalent_p (symtab_node *target);
201
202 /* Classify symbol symtab node for partitioning. */
203 enum symbol_partitioning_class get_partitioning_class (void);
204
205 /* Return comdat group. */
206 tree get_comdat_group ()
207 {
208 return x_comdat_group;
209 }
210
211 /* Return comdat group as identifier_node. */
212 tree get_comdat_group_id ()
213 {
214 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
215 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
216 return x_comdat_group;
217 }
218
219 /* Set comdat group. */
220 void set_comdat_group (tree group)
221 {
222 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
223 || DECL_P (group));
224 x_comdat_group = group;
225 }
226
227 /* Return section as string. */
228 const char * get_section ()
229 {
230 if (!x_section)
231 return NULL;
232 return x_section->name;
233 }
234
235 /* Remove node from same comdat group. */
236 void remove_from_same_comdat_group (void);
237
238 /* Add this symtab_node to the same comdat group that OLD is in. */
239 void add_to_same_comdat_group (symtab_node *old_node);
240
241 /* Dissolve the same_comdat_group list in which NODE resides. */
242 void dissolve_same_comdat_group_list (void);
243
244 /* Return true when symtab_node is known to be used from other (non-LTO)
245 object file. Known only when doing LTO via linker plugin. */
246 bool used_from_object_file_p (void);
247
248 /* Walk the alias chain to return the symbol NODE is alias of.
249 If NODE is not an alias, return NODE.
250 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
251 symtab_node *ultimate_alias_target (enum availability *avail = NULL);
252
253 /* Return next reachable static symbol with initializer after NODE. */
254 inline symtab_node *next_defined_symbol (void);
255
256 /* Add reference recording that symtab node is alias of TARGET.
257 The function can fail in the case of aliasing cycles; in this case
258 it returns false. */
259 bool resolve_alias (symtab_node *target);
260
261	  /* The C++ FE sometimes changes linkage flags after producing same
262	     body aliases.  */
263 void fixup_same_cpp_alias_visibility (symtab_node *target);
264
265	  /* Call CALLBACK on this symtab node and the aliases associated with it.
266	     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
267	     skipped.  */
268 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
269 void *data,
270 bool include_overwrite);
271
272	  /* If the node cannot be interposed by the static or dynamic linker to point
273	     to a different definition, return this symbol.  Otherwise look for an alias
274	     with such a property and, if none exists, introduce a new one.  */
275 symtab_node *noninterposable_alias (void);
276
277 /* Return node that alias is aliasing. */
278 inline symtab_node *get_alias_target (void);
279
280 /* Set section for symbol and its aliases. */
281 void set_section (const char *section);
282
283 /* Set section, do not recurse into aliases.
284 When one wants to change section of symbol and its aliases,
285 use set_section. */
286 void set_section_for_node (const char *section);
287
288 /* Set initialization priority to PRIORITY. */
289 void set_init_priority (priority_type priority);
290
291 /* Return the initialization priority. */
292 priority_type get_init_priority ();
293
294 /* Return availability of NODE. */
295 enum availability get_availability (void);
296
297 /* Make DECL local. */
298 void make_decl_local (void);
299
300 /* Return true if list contains an alias. */
301 bool has_aliases_p (void);
302
303	  /* Return true when the symbol is a real symbol, i.e. it is not an inline
304	     clone or an abstract function kept for debug info purposes only.  */
305 bool real_symbol_p (void);
306
307	  /* Determine if the symbol declaration is needed.  That is, visible to
308	     something either outside this translation unit or to something magic in
309	     the system configury.  This function is used only during symbol creation.  */
310 bool needed_p (void);
311
312 /* Return true when there are references to the node. */
313 bool referred_to_p (void);
314
315 /* Return true if NODE can be discarded by linker from the binary. */
316 inline bool
317 can_be_discarded_p (void)
318 {
319 return (DECL_EXTERNAL (decl)
320 || (get_comdat_group ()
321 && resolution != LDPR_PREVAILING_DEF
322 && resolution != LDPR_PREVAILING_DEF_IRONLY
323 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
324 }
325
326 /* Return true if NODE is local to a particular COMDAT group, and must not
327 be named from outside the COMDAT. This is used for C++ decloned
328 constructors. */
329 inline bool comdat_local_p (void)
330 {
331 return (same_comdat_group && !TREE_PUBLIC (decl));
332 }
333
334 /* Return true if ONE and TWO are part of the same COMDAT group. */
335 inline bool in_same_comdat_group_p (symtab_node *target);
336
337	  /* Return true when there is a reference to the node and it is not from a vtable.  */
338 bool address_taken_from_non_vtable_p (void);
339
340 /* Return true if symbol is known to be nonzero. */
341 bool nonzero_address ();
342
343 /* Return symbol table node associated with DECL, if any,
344 and NULL otherwise. */
345 static inline symtab_node *get (const_tree decl)
346 {
347 #ifdef ENABLE_CHECKING
348 /* Check that we are called for sane type of object - functions
349 and static or external variables. */
350 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
351 || (TREE_CODE (decl) == VAR_DECL
352 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
353 || in_lto_p)));
354 /* Check that the mapping is sane - perhaps this check can go away,
355	       but at the moment frontends tend to corrupt the mapping by calling
356 memcpy/memset on the tree nodes. */
357 gcc_checking_assert (!decl->decl_with_vis.symtab_node
358 || decl->decl_with_vis.symtab_node->decl == decl);
359 #endif
360 return decl->decl_with_vis.symtab_node;
361 }
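/* Lookup sketch (illustrative): the usual pattern is to map a DECL back to
   its symbol table entry and bail out when none exists; SNODE is a
   hypothetical name.

     if (symtab_node *snode = symtab_node::get (decl))
       {
	 // DECL is known to the symbol table; SNODE gives access to its
	 // references, comdat group, section and visibility flags.
       }  */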
362
363 /* Try to find a symtab node for declaration DECL and if it does not
364 exist or if it corresponds to an inline clone, create a new one. */
365 static inline symtab_node * get_create (tree node);
366
367 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
368 Return NULL if there's no such node. */
369 static symtab_node *get_for_asmname (const_tree asmname);
370
371 /* Dump symbol table to F. */
372 static void dump_table (FILE *);
373
374 /* Dump symbol table to stderr. */
375 static inline DEBUG_FUNCTION void debug_symtab (void)
376 {
377 dump_table (stderr);
378 }
379
380 /* Verify symbol table for internal consistency. */
381 static DEBUG_FUNCTION void verify_symtab_nodes (void);
382
383 /* Return true when NODE is known to be used from other (non-LTO)
384 object file. Known only when doing LTO via linker plugin. */
385 static bool used_from_object_file_p_worker (symtab_node *node);
386
387 /* Type of the symbol. */
388 ENUM_BITFIELD (symtab_type) type : 8;
389
390 /* The symbols resolution. */
391 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
392
393 /*** Flags representing the symbol type. ***/
394
395 /* True when symbol corresponds to a definition in current unit.
396	     Set via finalize_function or finalize_decl.  */
397 unsigned definition : 1;
398 /* True when symbol is an alias.
399	     Set by assemble_alias.  */
400 unsigned alias : 1;
401 /* True when alias is a weakref. */
402 unsigned weakref : 1;
403	  /* The C++ frontend produces same-body aliases and extra name aliases for
404	     virtual functions and vtables that are obviously equivalent.
405	     Those aliases are a bit special, especially because the C++ frontend
406	     visibility code is so ugly it cannot get them right the first time
407	     and their visibility needs to be copied from their "masters" at
408	     the end of parsing.  */
409 unsigned cpp_implicit_alias : 1;
410 /* Set once the definition was analyzed. The list of references and
411 other properties are built during analysis. */
412 unsigned analyzed : 1;
413 /* Set for write-only variables. */
414 unsigned writeonly : 1;
415 /* Visibility of symbol was used for further optimization; do not
416 permit further changes. */
417 unsigned refuse_visibility_changes : 1;
418
419 /*** Visibility and linkage flags. ***/
420
421 /* Set when function is visible by other units. */
422 unsigned externally_visible : 1;
423	  /* Don't reorder with respect to other symbols having this set.  */
424 unsigned no_reorder : 1;
425 /* The symbol will be assumed to be used in an invisible way (like
426	     by a toplevel asm statement).  */
427 unsigned force_output : 1;
428	  /* Like FORCE_OUTPUT, but in the case it is the ABI requiring the symbol to
429	     be exported.  Unlike FORCE_OUTPUT, this flag gets cleared for symbols
430	     promoted to static and it does not inhibit optimization.  */
431 unsigned forced_by_abi : 1;
432 /* True when the name is known to be unique and thus it does not need mangling. */
433 unsigned unique_name : 1;
434	  /* Specifies whether the section was set by the user or by the
435	     compiler via -ffunction-sections.  */
436 unsigned implicit_section : 1;
437 /* True when body and other characteristics have been removed by
438 symtab_remove_unreachable_nodes. */
439 unsigned body_removed : 1;
440
441 /*** WHOPR Partitioning flags.
442 These flags are used at ltrans stage when only part of the callgraph is
443 available. ***/
444
445 /* Set when variable is used from other LTRANS partition. */
446 unsigned used_from_other_partition : 1;
447 /* Set when function is available in the other LTRANS partition.
448 During WPA output it is used to mark nodes that are present in
449 multiple partitions. */
450 unsigned in_other_partition : 1;
451
452
453
454 /*** other flags. ***/
455
456 /* Set when symbol has address taken. */
457 unsigned address_taken : 1;
458 /* Set when init priority is set. */
459 unsigned in_init_priority_hash : 1;
460
461
462 /* Ordering of all symtab entries. */
463 int order;
464
465 /* Declaration representing the symbol. */
466 tree decl;
467
468 /* Linked list of symbol table entries starting with symtab_nodes. */
469 symtab_node *next;
470 symtab_node *previous;
471
472	  /* Linked list of symbols with the same asm name.  There may be multiple
473	     entries for a single symbol name during LTO, because symbols are renamed
474	     only after partitioning.
475	
476	     Because inline clones are kept in the assembler name hash, they also
477	     produce duplicate entries.
478	
479	     There are also several long-standing bugs where frontends and builtin
480	     code produce duplicated decls.  */
481 symtab_node *next_sharing_asm_name;
482 symtab_node *previous_sharing_asm_name;
483
484 /* Circular list of nodes in the same comdat group if non-NULL. */
485 symtab_node *same_comdat_group;
486
487 /* Vectors of referring and referenced entities. */
488 ipa_ref_list ref_list;
489
490	  /* Alias target.  May be either a DECL pointer or an ASSEMBLER_NAME pointer
491	     depending on what was known to the frontend at creation time.
492	     Once the alias is resolved, this pointer becomes NULL.  */
493 tree alias_target;
494
495 /* File stream where this node is being written to. */
496 struct lto_file_decl_data * lto_file_data;
497
498 PTR GTY ((skip)) aux;
499
500 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
501 tree x_comdat_group;
502
503 /* Section name. Again can be private, if allowed. */
504 section_hash_entry *x_section;
505
506 protected:
507 /* Dump base fields of symtab nodes to F. Not to be used directly. */
508 void dump_base (FILE *);
509
510 /* Verify common part of symtab node. */
511 bool DEBUG_FUNCTION verify_base (void);
512
513 /* Remove node from symbol table. This function is not used directly, but via
514 cgraph/varpool node removal routines. */
515 void unregister (void);
516
517 /* Return the initialization and finalization priority information for
518 DECL. If there is no previous priority information, a freshly
519 allocated structure is returned. */
520 struct symbol_priority_map *priority_info (void);
521
522 private:
523 /* Worker for set_section. */
524 static bool set_section (symtab_node *n, void *s);
525
526 /* Worker for symtab_resolve_alias. */
527 static bool set_implicit_section (symtab_node *n, void *);
528
529 /* Worker searching noninterposable alias. */
530 static bool noninterposable_alias (symtab_node *node, void *data);
531 };
532
533 /* Walk all aliases for NODE. */
534 #define FOR_EACH_ALIAS(node, alias) \
535 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
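/* Usage sketch for the macro above (illustrative; ALIAS_REF is a
   hypothetical name):

     ipa_ref *alias_ref;
     FOR_EACH_ALIAS (node, alias_ref)
       {
	 // alias_ref->referring is a symtab_node that is a direct alias
	 // of NODE.
       }  */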
536
537 /* This is the information that is put into the cgraph local structure
538 to recover a function. */
539 struct lto_file_decl_data;
540
541 extern const char * const cgraph_availability_names[];
542 extern const char * const ld_plugin_symbol_resolution_names[];
543 extern const char * const tls_model_names[];
544
545 /* Information about thunk, used only for same body aliases. */
546
547 struct GTY(()) cgraph_thunk_info {
548 /* Information about the thunk. */
549 HOST_WIDE_INT fixed_offset;
550 HOST_WIDE_INT virtual_value;
551 tree alias;
552 bool this_adjusting;
553 bool virtual_offset_p;
554 /* Set to true when alias node is thunk. */
555 bool thunk_p;
556 };
557
558 /* Information about the function collected locally.
559 Available after function is analyzed. */
560
561 struct GTY(()) cgraph_local_info {
562	  /* Set when function is visible in the current compilation unit only
563 and its address is never taken. */
564 unsigned local : 1;
565
566	  /* False when something makes versioning impossible.  */
567 unsigned versionable : 1;
568
569 /* False when function calling convention and signature can not be changed.
570 This is the case when __builtin_apply_args is used. */
571 unsigned can_change_signature : 1;
572
573 /* True when the function has been originally extern inline, but it is
574 redefined now. */
575 unsigned redefined_extern_inline : 1;
576
577 /* True if the function may enter serial irrevocable mode. */
578 unsigned tm_may_enter_irr : 1;
579 };
580
581 /* Information about the function that needs to be computed globally
582 once compilation is finished. Available only with -funit-at-a-time. */
583
584 struct GTY(()) cgraph_global_info {
585 /* For inline clones this points to the function they will be
586 inlined into. */
587 cgraph_node *inlined_to;
588 };
589
590 /* Information about the function that is propagated by the RTL backend.
591	   Available only for functions that have already been assembled.  */
592
593 struct GTY(()) cgraph_rtl_info {
594 unsigned int preferred_incoming_stack_boundary;
595
596 /* Call unsaved hard registers really used by the corresponding
597 function (including ones used by functions called by the
598 function). */
599 HARD_REG_SET function_used_regs;
600 /* Set if function_used_regs is valid. */
601 unsigned function_used_regs_valid: 1;
602 };
603
604 /* Represent which DECL tree (or reference to such tree)
605 will be replaced by another tree while versioning. */
606 struct GTY(()) ipa_replace_map
607 {
608 /* The tree that will be replaced. */
609 tree old_tree;
610 /* The new (replacing) tree. */
611 tree new_tree;
612 /* Parameter number to replace, when old_tree is NULL. */
613 int parm_num;
614 /* True when a substitution should be done, false otherwise. */
615 bool replace_p;
616 /* True when we replace a reference to old_tree. */
617 bool ref_p;
618 };
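/* Illustrative sketch (hypothetical values): a replace map entry that
   substitutes a known constant for the first parameter of a versioned
   function could be filled in roughly like this.

     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL;    // identify the parameter by number instead
     map->parm_num = 0;       // first parameter
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;
     map->ref_p = false;  */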
619
620 struct GTY(()) cgraph_clone_info
621 {
622 vec<ipa_replace_map *, va_gc> *tree_map;
623 bitmap args_to_skip;
624 bitmap combined_args_to_skip;
625 };
626
627 enum cgraph_simd_clone_arg_type
628 {
629 SIMD_CLONE_ARG_TYPE_VECTOR,
630 SIMD_CLONE_ARG_TYPE_UNIFORM,
631 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
632 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
633 SIMD_CLONE_ARG_TYPE_MASK
634 };
635
636 /* Function arguments in the original function of a SIMD clone.
637 Supplementary data for `struct simd_clone'. */
638
639 struct GTY(()) cgraph_simd_clone_arg {
640 /* Original function argument as it originally existed in
641 DECL_ARGUMENTS. */
642 tree orig_arg;
643
644	  /* orig_arg's type (or for extern functions, the type from
645	     TYPE_ARG_TYPES).  */
646 tree orig_type;
647
648 /* If argument is a vector, this holds the vector version of
649 orig_arg that after adjusting the argument types will live in
650 DECL_ARGUMENTS. Otherwise, this is NULL.
651
652 This basically holds:
653 vector(simdlen) __typeof__(orig_arg) new_arg. */
654 tree vector_arg;
655
656	  /* vector_arg's type (or for extern functions, the new vector type).  */
657 tree vector_type;
658
659 /* If argument is a vector, this holds the array where the simd
660 argument is held while executing the simd clone function. This
661 is a local variable in the cloned function. Its content is
662 copied from vector_arg upon entry to the clone.
663
664 This basically holds:
665 __typeof__(orig_arg) simd_array[simdlen]. */
666 tree simd_array;
667
668 /* A SIMD clone's argument can be either linear (constant or
669 variable), uniform, or vector. */
670 enum cgraph_simd_clone_arg_type arg_type;
671
672 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP this is
673 the constant linear step, if arg_type is
674 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP, this is index of
675 the uniform argument holding the step, otherwise 0. */
676 HOST_WIDE_INT linear_step;
677
678 /* Variable alignment if available, otherwise 0. */
679 unsigned int alignment;
680 };
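/* Illustrative example for the fields above (hypothetical function): for a
   scalar function `int foo (int x)` cloned with simdlen 8, ORIG_ARG is the
   PARM_DECL of `x` with ORIG_TYPE `int`, VECTOR_ARG is the replacement
   parameter of type `vector(8) int`, and SIMD_ARRAY is the clone-local
   array `int simd_array[8]` filled from VECTOR_ARG on entry.  */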
681
682 /* Specific data for a SIMD function clone. */
683
684 struct GTY(()) cgraph_simd_clone {
685 /* Number of words in the SIMD lane associated with this clone. */
686 unsigned int simdlen;
687
688 /* Number of annotated function arguments in `args'. This is
689 usually the number of named arguments in FNDECL. */
690 unsigned int nargs;
691
692 /* Max hardware vector size in bits for integral vectors. */
693 unsigned int vecsize_int;
694
695 /* Max hardware vector size in bits for floating point vectors. */
696 unsigned int vecsize_float;
697
698	  /* The mangling character for a given vector size.  This is used
699 to determine the ISA mangling bit as specified in the Intel
700 Vector ABI. */
701 unsigned char vecsize_mangle;
702
703 /* True if this is the masked, in-branch version of the clone,
704 otherwise false. */
705 unsigned int inbranch : 1;
706
707 /* True if this is a Cilk Plus variant. */
708 unsigned int cilk_elemental : 1;
709
710 /* Doubly linked list of SIMD clones. */
711 cgraph_node *prev_clone, *next_clone;
712
713 /* Original cgraph node the SIMD clones were created for. */
714 cgraph_node *origin;
715
716 /* Annotated function arguments for the original function. */
717 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
718 };
719
720 /* Function Multiversioning info. */
721 struct GTY((for_user)) cgraph_function_version_info {
722 /* The cgraph_node for which the function version info is stored. */
723 cgraph_node *this_node;
724 /* Chains all the semantically identical function versions. The
725 first function in this chain is the version_info node of the
726 default function. */
727 cgraph_function_version_info *prev;
728 /* If this version node corresponds to a dispatcher for function
729 versions, this points to the version info node of the default
730 function, the first node in the chain. */
731 cgraph_function_version_info *next;
732 /* If this node corresponds to a function version, this points
733 to the dispatcher function decl, which is the function that must
734 be called to execute the right function version at run-time.
735
736 If this cgraph node is a dispatcher (if dispatcher_function is
737 true, in the cgraph_node struct) for function versions, this
738 points to resolver function, which holds the function body of the
739 dispatcher. The dispatcher decl is an alias to the resolver
740 function decl. */
741 tree dispatcher_resolver;
742 };
743
744 #define DEFCIFCODE(code, type, string) CIF_ ## code,
745 /* Reasons for inlining failures. */
746
747 enum cgraph_inline_failed_t {
748 #include "cif-code.def"
749 CIF_N_REASONS
750 };
751
752 enum cgraph_inline_failed_type_t
753 {
754 CIF_FINAL_NORMAL = 0,
755 CIF_FINAL_ERROR
756 };
757
758 struct cgraph_edge;
759
760 struct cgraph_edge_hasher : ggc_hasher<cgraph_edge *>
761 {
762 typedef gimple compare_type;
763
764 static hashval_t hash (cgraph_edge *);
765 static bool equal (cgraph_edge *, gimple);
766 };
767
768 /* The cgraph data structure.
769 Each function decl has assigned cgraph_node listing callees and callers. */
770
771 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
772 public:
773	  /* Remove the node from the cgraph and all inline clones inlined into it.
774	     However, skip removal of FORBIDDEN_NODE and return true if it needs to be
775	     removed.  This allows the function to be called from an outer loop walking
776	     the clone tree.  */
777 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
778
779 /* Record all references from cgraph_node that are taken
780 in statement STMT. */
781 void record_stmt_references (gimple stmt);
782
783 /* Like cgraph_set_call_stmt but walk the clone tree and update all
784 clones sharing the same function body.
785	     When UPDATE_SPECULATIVE is true, all three components of a
786	     speculative edge get updated.  Otherwise we update only the direct
787	     call.  */
788 void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
789 bool update_speculative = true);
790
791 /* Walk the alias chain to return the function cgraph_node is alias of.
792 Walk through thunk, too.
793 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
794 cgraph_node *function_symbol (enum availability *avail = NULL);
795
796 /* Create node representing clone of N executed COUNT times. Decrease
797 the execution counts from original node too.
798 The new clone will have decl set to DECL that may or may not be the same
799 as decl of N.
800
801 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
802 function's profile to reflect the fact that part of execution is handled
803 by node.
804	     When CALL_DUPLICATION_HOOK is true, the IPA passes are notified about
805	     the new clone.  Otherwise the caller is responsible for doing so later.
806
807 If the new node is being inlined into another one, NEW_INLINED_TO should be
808 the outline function the new one is (even indirectly) inlined to.
809 All hooks will see this in node's global.inlined_to, when invoked.
810 Can be NULL if the node is not inlined. */
811 cgraph_node *create_clone (tree decl, gcov_type count, int freq,
812 bool update_original,
813 vec<cgraph_edge *> redirect_callers,
814 bool call_duplication_hook,
815 cgraph_node *new_inlined_to,
816 bitmap args_to_skip);
817
818 /* Create callgraph node clone with new declaration. The actual body will
819 be copied later at compilation stage. */
820 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
821 vec<ipa_replace_map *, va_gc> *tree_map,
822 bitmap args_to_skip, const char * suffix);
823
824 /* cgraph node being removed from symbol table; see if its entry can be
825	     replaced by another inline clone.  */
826 cgraph_node *find_replacement (void);
827
828 /* Create a new cgraph node which is the new version of
829 callgraph node. REDIRECT_CALLERS holds the callers
830 edges which should be redirected to point to
831 NEW_VERSION. ALL the callees edges of the node
832 are cloned to the new version node. Return the new
833 version node.
834
835	     If non-NULL, BBS_TO_COPY determines what basic blocks
836	     are copied to prevent duplication of calls that are dead
837	     in the clone.  */
838
839 cgraph_node *create_version_clone (tree new_decl,
840 vec<cgraph_edge *> redirect_callers,
841 bitmap bbs_to_copy);
842
843 /* Perform function versioning.
844 Function versioning includes copying of the tree and
845 a callgraph update (creating a new cgraph node and updating
846 its callees and callers).
847
848 REDIRECT_CALLERS varray includes the edges to be redirected
849 to the new version.
850
851 TREE_MAP is a mapping of tree nodes we want to replace with
852 new ones (according to results of prior analysis).
853
854	     If non-NULL, ARGS_TO_SKIP determines function parameters to remove
855	     from the new version.
856	     If SKIP_RETURN is true, the new version will return void.
857	     If non-NULL, BBS_TO_COPY determines what basic blocks to copy.
858	     If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
859
860 Return the new version's cgraph node. */
861 cgraph_node *create_version_clone_with_body
862 (vec<cgraph_edge *> redirect_callers,
863 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
864 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
865 const char *clone_name);
866
867 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
868 corresponding to cgraph_node. */
869 cgraph_function_version_info *insert_new_function_version (void);
870
871 /* Get the cgraph_function_version_info node corresponding to node. */
872 cgraph_function_version_info *function_version (void);
873
874	  /* Discover all functions and variables that are trivially needed, analyze
875	     them as well as all functions and variables referred to by them.  */
876 void analyze (void);
877
878 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
879	     aliases DECL with adjustments made to the first parameter.
880 See comments in thunk_adjust for detail on the parameters. */
881 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
882 HOST_WIDE_INT fixed_offset,
883 HOST_WIDE_INT virtual_value,
884 tree virtual_offset,
885 tree real_alias);
886
887
888 /* Return node that alias is aliasing. */
889 inline cgraph_node *get_alias_target (void);
890
891	  /* Given a function symbol, walk the alias chain to return the function the
892	     node is an alias of.  Do not walk through thunks.
893 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
894
895 cgraph_node *ultimate_alias_target (availability *availability = NULL);
896
897 /* Expand thunk NODE to gimple if possible.
898	     When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
899	     no assembler is produced.
900	     When OUTPUT_ASM_THUNKS is true, also produce assembler for
901	     thunks that are not lowered.  */
902 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
903
904 /* Assemble thunks and aliases associated to node. */
905 void assemble_thunks_and_aliases (void);
906
907 /* Expand function specified by node. */
908 void expand (void);
909
910	  /* As a GCC extension we allow redefinition of the function.  The
911	     semantics when both copies of the body differ is not well defined.
912	     We replace the old body with the new body so that in unit-at-a-time mode
913	     we always use the new body, while in normal mode we may end up with
914	     the old body inlined into some functions and the new body expanded and
915	     inlined in others.  */
916 void reset (void);
917
918 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
919 kind of wrapper method. */
920 void create_wrapper (cgraph_node *target);
921
922 /* Verify cgraph nodes of the cgraph node. */
923 void DEBUG_FUNCTION verify_node (void);
924
925 /* Remove function from symbol table. */
926 void remove (void);
927
928 /* Dump call graph node to file F. */
929 void dump (FILE *f);
930
931 /* Dump call graph node to stderr. */
932 void DEBUG_FUNCTION debug (void);
933
934 /* When doing LTO, read cgraph_node's body from disk if it is not already
935 present. */
936 bool get_body (void);
937
938 /* Release memory used to represent body of function.
939 Use this only for functions that are released before being translated to
940 target code (i.e. RTL). Functions that are compiled to RTL and beyond
941 are free'd in final.c via free_after_compilation(). */
942 void release_body (bool keep_arguments = false);
943
944 /* Return the DECL_STRUCT_FUNCTION of the function. */
945 struct function *get_fun (void);
946
947	  /* cgraph_node is no longer a nested function; update cgraph accordingly.  */
948 void unnest (void);
949
950	  /* Make the cgraph node local.  */
951 void make_local (void);
952
953 /* Likewise indicate that a node is having address taken. */
954 void mark_address_taken (void);
955
956	  /* Set finalization priority to PRIORITY.  */
957 void set_fini_priority (priority_type priority);
958
959 /* Return the finalization priority. */
960 priority_type get_fini_priority (void);
961
962 /* Create edge from a given function to CALLEE in the cgraph. */
963 cgraph_edge *create_edge (cgraph_node *callee,
964 gimple call_stmt, gcov_type count,
965 int freq);
966
967 /* Create an indirect edge with a yet-undetermined callee where the call
968 statement destination is a formal parameter of the caller with index
969 PARAM_INDEX. */
970 cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
971 gcov_type count, int freq,
972 bool compute_indirect_info = true);
973
974	  /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
975	     the same function body.  If clones already have an edge for OLD_STMT, only
976	     update the edge the same way as cgraph_set_call_stmt_including_clones does.  */
977 void create_edge_including_clones (cgraph_node *callee,
978 gimple old_stmt, gimple stmt,
979 gcov_type count,
980 int freq,
981 cgraph_inline_failed_t reason);
982
983 /* Return the callgraph edge representing the GIMPLE_CALL statement
984 CALL_STMT. */
985 cgraph_edge *get_edge (gimple call_stmt);
986
987 /* Collect all callers of cgraph_node and its aliases that are known to lead
988 to NODE (i.e. are not overwritable). */
989 vec<cgraph_edge *> collect_callers (void);
990
991 /* Remove all callers from the node. */
992 void remove_callers (void);
993
994 /* Remove all callees from the node. */
995 void remove_callees (void);
996
997 /* Return function availability. See cgraph.h for description of individual
998 return values. */
999 enum availability get_availability (void);
1000
1001 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1002 if any to NOTHROW. */
1003 void set_nothrow_flag (bool nothrow);
1004
1005 /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
1006 if any to READONLY. */
1007 void set_const_flag (bool readonly, bool looping);
1008
1009 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1010 if any to PURE. */
1011 void set_pure_flag (bool pure, bool looping);
1012
1013	  /* Call CALLBACK on the function and the aliases associated with it.
1014 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1015 skipped. */
1016
1017 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1018 void *),
1019 void *data, bool include_overwritable);
1020
1021	  /* Call CALLBACK on the cgraph_node, thunks and aliases associated with it.
1022 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1023 skipped. */
1024 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1025 void *data),
1026 void *data,
1027 bool include_overwritable);
1028
1029 /* Likewise indicate that a node is needed, i.e. reachable via some
1030 external means. */
1031 inline void mark_force_output (void);
1032
1033 /* Return true when function can be marked local. */
1034 bool local_p (void);
1035
1036 /* Return true if cgraph_node can be made local for API change.
1037 Extern inline functions and C++ COMDAT functions can be made local
1038 at the expense of possible code size growth if function is used in multiple
1039 compilation units. */
1040 bool can_be_local_p (void);
1041
1042 /* Return true when cgraph_node can not return or throw and thus
1043 it is safe to ignore its side effects for IPA analysis. */
1044 bool cannot_return_p (void);
1045
1046	  /* Return true when the function and all its aliases are only called
1047	     directly, i.e. it is not externally visible, its address is not taken and
1048	     it is not used in any other non-standard way.  */
1050 bool only_called_directly_p (void);
1051
1052	  /* Return true when the function is only called directly or it has an alias,
1053	     i.e. it is not externally visible, its address is not taken and
1054	     it is not used in any other non-standard way.  */
1055 inline bool only_called_directly_or_aliased_p (void);
1056
1057 /* Return true when function cgraph_node can be expected to be removed
1058 from program when direct calls in this compilation unit are removed.
1059
1060	     As a special case, COMDAT functions are
1061	     cgraph_can_remove_if_no_direct_calls_p while they are not
1062	     cgraph_only_called_directly_p (it is possible they are called from
1063	     another unit).
1064	
1065	     This function behaves as cgraph_only_called_directly_p because eliminating
1066	     all uses of a COMDAT function does not necessarily make it disappear from
1067	     the program unless we are compiling the whole program or we do LTO.  In
1068	     that case we know we win since dynamic linking will not really discard the
1069	     linkonce section.  */
1070 bool will_be_removed_from_program_if_no_direct_calls_p (void);
1071
1072 /* Return true when function can be removed from callgraph
1073 if all direct calls are eliminated. */
1074 bool can_remove_if_no_direct_calls_and_refs_p (void);
1075
1076 /* Return true when function cgraph_node and its aliases can be removed from
1077 callgraph if all direct calls are eliminated. */
1078 bool can_remove_if_no_direct_calls_p (void);
1079
1080 /* Return true when callgraph node is a function with Gimple body defined
1081	     in the current unit.  Functions can also be defined externally or they
1082 can be thunks with no Gimple representation.
1083
1084 Note that at WPA stage, the function body may not be present in memory. */
1085 inline bool has_gimple_body_p (void);
1086
1087 /* Return true if function should be optimized for size. */
1088 bool optimize_for_size_p (void);
1089
1090 /* Dump the callgraph to file F. */
1091 static void dump_cgraph (FILE *f);
1092
1093 /* Dump the call graph to stderr. */
1094 static inline
1095 void debug_cgraph (void)
1096 {
1097 dump_cgraph (stderr);
1098 }
1099
1100 /* Record that DECL1 and DECL2 are semantically identical function
1101 versions. */
1102 static void record_function_versions (tree decl1, tree decl2);
1103
1104 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1105 DECL is a duplicate declaration. */
1106 static void delete_function_version (tree decl);
1107
1108 /* Add the function FNDECL to the call graph.
1109 Unlike finalize_function, this function is intended to be used
1110	     by the middle end and allows insertion of new functions at an arbitrary
1111	     point of compilation.  The function can be either in high, low or SSA form
1112 GIMPLE.
1113
1114 The function is assumed to be reachable and have address taken (so no
1115 API breaking optimizations are performed on it).
1116
1117	     The main work done by this function is to enqueue the function for later
1118	     processing to avoid the need for the passes to be re-entrant.  */
1119 static void add_new_function (tree fndecl, bool lowered);
1120
1121 /* Return callgraph node for given symbol and check it is a function. */
1122 static inline cgraph_node *get (const_tree decl)
1123 {
1124 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1125 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1126 }
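/* Lookup sketch (illustrative; CNODE and FNDECL are hypothetical names):
   the function-specific variant is used the same way as symtab_node::get.

     if (cgraph_node *cnode = cgraph_node::get (fndecl))
       {
	 // FNDECL already has a call graph node; its callees, callers and
	 // clone tree can be walked from here.
       }  */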
1127
1128 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1129 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1130 have the garbage collector run at the moment. We would need to either
1131 create a new GC context, or just not compile right now. */
1132 static void finalize_function (tree, bool);
1133
1134 /* Return cgraph node assigned to DECL. Create new one when needed. */
1135 static cgraph_node * create (tree decl);
1136
1137 /* Try to find a call graph node for declaration DECL and if it does not
1138 exist or if it corresponds to an inline clone, create a new one. */
1139 static cgraph_node * get_create (tree);
1140
1141 /* Return local info for the compiled function. */
1142 static cgraph_local_info *local_info (tree decl);
1143
1144 /* Return global info for the compiled function. */
1145 static cgraph_global_info *global_info (tree);
1146
1147	  /* Return RTL info for the compiled function.  */
1148 static cgraph_rtl_info *rtl_info (tree);
1149
1150 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1151 Return NULL if there's no such node. */
1152 static cgraph_node *get_for_asmname (tree asmname);
1153
1154 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1155 successful and NULL otherwise.
1156 Same body aliases are output whenever the body of DECL is output,
1157 and cgraph_node::get (ALIAS) transparently
1158 returns cgraph_node::get (DECL). */
1159 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1160
1161	  /* Worker for used_from_object_file_p.  */
1162 static bool used_from_object_file_p_worker (cgraph_node *node, void *)
1163 {
1164 return node->used_from_object_file_p ();
1165 }
1166
1167 /* Return true when cgraph_node can not be local.
1168 Worker for cgraph_local_node_p. */
1169 static bool non_local_p (cgraph_node *node, void *);
1170
1171 /* Verify whole cgraph structure. */
1172 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1173
1174 /* Worker to bring NODE local. */
1175 static bool make_local (cgraph_node *node, void *);
1176
1177	  /* Mark ALIAS as an alias to DECL.  DECL_NODE is the cgraph node
1178	     representing the function the body is associated with
1179	     (not necessarily cgraph_node (DECL)).  */
1180 static cgraph_node *create_alias (tree alias, tree target);
1181
1182 cgraph_edge *callees;
1183 cgraph_edge *callers;
1184 /* List of edges representing indirect calls with a yet undetermined
1185 callee. */
1186 cgraph_edge *indirect_calls;
1187 /* For nested functions points to function the node is nested in. */
1188 cgraph_node *origin;
1189 /* Points to first nested function, if any. */
1190 cgraph_node *nested;
1191 /* Pointer to the next function with same origin, if any. */
1192 cgraph_node *next_nested;
1193 /* Pointer to the next clone. */
1194 cgraph_node *next_sibling_clone;
1195 cgraph_node *prev_sibling_clone;
1196 cgraph_node *clones;
1197 cgraph_node *clone_of;
1198	  /* For functions with many call sites it holds a map from call expression
1199	     to the edge, to speed up the get_edge member function.  */
1200 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1201	  /* Declaration node this node used to be a clone of.  */
1202 tree former_clone_of;
1203
1204 /* If this is a SIMD clone, this points to the SIMD specific
1205 information for it. */
1206 cgraph_simd_clone *simdclone;
1207 /* If this function has SIMD clones, this points to the first clone. */
1208 cgraph_node *simd_clones;
1209
1210 /* Interprocedural passes scheduled to have their transform functions
1211 applied next time we execute local pass on them. We maintain it
1212 per-function in order to allow IPA passes to introduce new functions. */
1213 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1214
1215 cgraph_local_info local;
1216 cgraph_global_info global;
1217 cgraph_rtl_info rtl;
1218 cgraph_clone_info clone;
1219 cgraph_thunk_info thunk;
1220
1221 /* Expected number of executions: calculated in profile.c. */
1222 gcov_type count;
1223 /* How to scale counts at materialization time; used to merge
1224 LTO units with different number of profile runs. */
1225 int count_materialization_scale;
1226 /* Unique id of the node. */
1227 int uid;
1228 /* ID assigned by the profiling. */
1229 unsigned int profile_id;
1230 /* Time profiler: first run of function. */
1231 int tp_first_run;
1232
1233 /* Set when decl is an abstract function pointed to by the
1234 ABSTRACT_DECL_ORIGIN of a reachable function. */
1235 unsigned used_as_abstract_origin : 1;
1236 /* Set once the function is lowered (i.e. its CFG is built). */
1237 unsigned lowered : 1;
1238 /* Set once the function has been instantiated and its callee
1239 lists created. */
1240 unsigned process : 1;
1241 /* How commonly executed the node is. Initialized during branch
1242 probabilities pass. */
1243 ENUM_BITFIELD (node_frequency) frequency : 2;
1244 /* True when function can only be called at startup (from static ctor). */
1245 unsigned only_called_at_startup : 1;
1246	  /* True when function can only be called at exit (from static dtor).  */
1247 unsigned only_called_at_exit : 1;
1248 /* True when function is the transactional clone of a function which
1249 is called only from inside transactions. */
1250 /* ?? We should be able to remove this. We have enough bits in
1251 cgraph to calculate it. */
1252 unsigned tm_clone : 1;
1253 /* True if this decl is a dispatcher for function versions. */
1254 unsigned dispatcher_function : 1;
1255 /* True if this decl calls a COMDAT-local function. This is set up in
1256 compute_inline_parameters and inline_call. */
1257 unsigned calls_comdat_local : 1;
1258 /* True if node has been created by merge operation in IPA-ICF. */
1259 unsigned icf_merged: 1;
1260 };
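/* Edge-walking sketch (illustrative): the callees, callers and
   indirect_calls lists declared above are chained through the
   next_callee/next_caller fields of cgraph_edge, so a pass typically walks
   them like this (NODE is a hypothetical name).

     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       {
	 // e->callee is the called cgraph_node, e->call_stmt the GIMPLE
	 // call statement.
       }
     for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
       {
	 // Indirect edges have no callee yet; e->indirect_info describes
	 // what is known about the call target.
       }  */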
1261
1262 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1263 can appear in multiple sets. */
1264 struct cgraph_node_set_def
1265 {
1266 hash_map<cgraph_node *, size_t> *map;
1267 vec<cgraph_node *> nodes;
1268 };
1269
1270 typedef cgraph_node_set_def *cgraph_node_set;
1271 typedef struct varpool_node_set_def *varpool_node_set;
1272
1273 class varpool_node;
1274
1275 /* A varpool node set is a collection of varpool nodes. A varpool node
1276 can appear in multiple sets. */
1277 struct varpool_node_set_def
1278 {
1279 hash_map<varpool_node *, size_t> * map;
1280 vec<varpool_node *> nodes;
1281 };
1282
1283 /* Iterator structure for cgraph node sets. */
1284 struct cgraph_node_set_iterator
1285 {
1286 cgraph_node_set set;
1287 unsigned index;
1288 };
1289
1290 /* Iterator structure for varpool node sets. */
1291 struct varpool_node_set_iterator
1292 {
1293 varpool_node_set set;
1294 unsigned index;
1295 };
1296
1297	/* Context of a polymorphic call.  It represents information about the type
1298	   of the instance that may reach the call.  This is used by ipa-devirt walkers
1299	   of the type inheritance graph.  */
1300
1301 class GTY(()) ipa_polymorphic_call_context {
1302 public:
1303 /* The called object appears in an object of type OUTER_TYPE
1304 at offset OFFSET. When information is not 100% reliable, we
1305 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1306 HOST_WIDE_INT offset;
1307 HOST_WIDE_INT speculative_offset;
1308 tree outer_type;
1309 tree speculative_outer_type;
1310 /* True if outer object may be in construction or destruction. */
1311 unsigned maybe_in_construction : 1;
1312 /* True if outer object may be of derived type. */
1313 unsigned maybe_derived_type : 1;
1314 /* True if speculative outer object may be of derived type. We always
1315 speculate that construction does not happen. */
1316 unsigned speculative_maybe_derived_type : 1;
1317 /* True if the context is invalid and all calls should be redirected
1318 to BUILTIN_UNREACHABLE. */
1319 unsigned invalid : 1;
1320 /* True if the outer type is dynamic. */
1321 unsigned dynamic : 1;
1322
1323 /* Build empty "I know nothing" context. */
1324 ipa_polymorphic_call_context ();
1325 /* Build polymorphic call context for indirect call E. */
1326 ipa_polymorphic_call_context (cgraph_edge *e);
1327 /* Build polymorphic call context for IP invariant CST.
1328	     If specified, OTR_TYPE specifies the type of the polymorphic call
1329	     that takes CST+OFFSET as a parameter.  */
1330 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1331 HOST_WIDE_INT offset = 0);
1332 /* Build context for pointer REF contained in FNDECL at statement STMT.
1333	     If INSTANCE is non-NULL, return a pointer to the object described by
1334 the context. */
1335 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple stmt,
1336 tree *instance = NULL);
1337
1338 /* Look for vtable stores or constructor calls to work out dynamic type
1339 of memory location. */
1340 bool get_dynamic_type (tree, tree, tree, gimple);
1341
1342 /* Make context non-speculative. */
1343 void clear_speculation ();
1344
1345 /* Walk container types and modify context to point to actual class
1346 containing OTR_TYPE (if non-NULL) as base class.
1347 Return true if resulting context is valid.
1348
1349 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1350	     valid only via allocation of a new polymorphic type inside by means
1351 of placement new.
1352
1353 When CONSIDER_BASES is false, only look for actual fields, not base types
1354 of TYPE. */
1355 bool restrict_to_inner_class (tree otr_type,
1356 bool consider_placement_new = true,
1357 bool consider_bases = true);
1358
1359 /* Adjust all offsets in contexts by given number of bits. */
1360 void offset_by (HOST_WIDE_INT);
1361	  /* Use when we cannot track dynamic type change.  This speculatively assumes
1362	     the type change is not happening.  */
1363 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1364	  /* Assume that both THIS and a given context are valid and strengthen THIS
1365	     if possible.  Return true if any strengthening was made.
1366 If actual type the context is being used in is known, OTR_TYPE should be
1367 set accordingly. This improves quality of combined result. */
1368 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1369
1370 /* Return TRUE if context is fully useless. */
1371 bool useless_p () const;
1372
1373 /* Dump human readable context to F. */
1374 void dump (FILE *f) const;
1375 void DEBUG_FUNCTION debug () const;
1376
1377 /* LTO streaming. */
1378 void stream_out (struct output_block *) const;
1379 void stream_in (struct lto_input_block *, struct data_in *data_in);
1380
1381 private:
1382 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1383 void set_by_decl (tree, HOST_WIDE_INT);
1384 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1385 void clear_outer_type (tree otr_type = NULL);
1386 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree);
1387 void make_speculative (tree otr_type = NULL);
1388 };
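/* Usage sketch (illustrative; EDGE is a hypothetical name): ipa-devirt
   style code builds a context for a polymorphic call edge and gives up
   early when the context carries no information.

     ipa_polymorphic_call_context ctx (edge);
     if (!ctx.useless_p ())
       {
	 // outer_type/offset (and the speculative variants) can now be
	 // used to walk the type inheritance graph for likely targets.
       }  */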
1389
1390 /* Structure containing additional information about an indirect call. */
1391
1392 struct GTY(()) cgraph_indirect_call_info
1393 {
1394 /* When agg_content is set, an offset where the call pointer is located
1395 within the aggregate. */
1396 HOST_WIDE_INT offset;
1397 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1398 ipa_polymorphic_call_context context;
1399 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1400 HOST_WIDE_INT otr_token;
1401 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1402 tree otr_type;
1403 /* Index of the parameter that is called. */
1404 int param_index;
1405 /* ECF flags determined from the caller. */
1406 int ecf_flags;
1407	  /* Profile_id of the common target obtained from the profile.  */
1408 int common_target_id;
1409 /* Probability that call will land in function with COMMON_TARGET_ID. */
1410 int common_target_probability;
1411
1412 /* Set when the call is a virtual call with the parameter being the
1413 associated object pointer rather than a simple direct call. */
1414 unsigned polymorphic : 1;
1415 /* Set when the call is a call of a pointer loaded from contents of an
1416 aggregate at offset. */
1417 unsigned agg_contents : 1;
1418 /* Set when this is a call through a member pointer. */
1419 unsigned member_ptr : 1;
1420 /* When the previous bit is set, this one determines whether the destination
1421 is loaded from a parameter passed by reference. */
1422 unsigned by_ref : 1;
1423	  /* For polymorphic calls this specifies whether the virtual table pointer
1424 may have changed in between function entry and the call. */
1425 unsigned vptr_changed : 1;
1426 };
1427
1428 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1429 for_user)) cgraph_edge {
1430 friend class cgraph_node;
1431
1432 /* Remove the edge in the cgraph. */
1433 void remove (void);
1434
1435 /* Change field call_stmt of edge to NEW_STMT.
1436 If UPDATE_SPECULATIVE and E is any component of speculative
1437 edge, then update all components. */
1438 void set_call_stmt (gimple new_stmt, bool update_speculative = true);
1439
1440 /* Redirect callee of the edge to N. The function does not update underlying
1441 call expression. */
1442 void redirect_callee (cgraph_node *n);
1443
1444 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1445 CALLEE. DELTA is an integer constant that is to be added to the this
1446 pointer (first parameter) to compensate for skipping
1447 a thunk adjustment. */
1448 cgraph_edge *make_direct (cgraph_node *callee);
1449
1450 /* Turn edge into speculative call calling N2. Update
1451 the profile so the direct call is taken COUNT times
1452 with FREQUENCY. */
1453 cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
1454 int direct_frequency);
1455
1456 /* Given speculative call edge, return all three components. */
1457 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1458 ipa_ref *&reference);
1459
1460	  /* Speculative call edge turned out to be a direct call to CALLEE_DECL.
1461 Remove the speculative call sequence and return edge representing the call.
1462 It is up to caller to redirect the call as appropriate. */
1463 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1464
1465 /* If necessary, change the function declaration in the call statement
1466 associated with the edge so that it corresponds to the edge callee. */
1467 gimple redirect_call_stmt_to_callee (void);
1468
1469 /* Create a clone of the edge in node N, represented
1470 by CALL_STMT, in the callgraph. */
1471 cgraph_edge * clone (cgraph_node *n, gimple call_stmt, unsigned stmt_uid,
1472 gcov_type count_scale, int freq_scale, bool update_original);
1473
1474 /* Return true when a call of the edge cannot lead to return from the caller
1475 and thus it is safe to ignore its side effects for IPA analysis
1476 when computing side effects of the caller. */
1477 bool cannot_lead_to_return_p (void);
1478
1479 /* Return true when the edge represents a direct recursion. */
1480 bool recursive_p (void);
1481
1482 /* Return true if the call can be hot. */
1483 bool maybe_hot_p (void);
1484
1485 /* Rebuild cgraph edges for current function node. This needs to be run after
1486 passes that don't update the cgraph. */
1487 static unsigned int rebuild_edges (void);
1488
1489 /* Rebuild cgraph references for current function node. This needs to be run
1490 after passes that don't update the cgraph. */
1491 static void rebuild_references (void);
1492
1493 /* Expected number of executions: calculated in profile.c. */
1494 gcov_type count;
1495 cgraph_node *caller;
1496 cgraph_node *callee;
1497 cgraph_edge *prev_caller;
1498 cgraph_edge *next_caller;
1499 cgraph_edge *prev_callee;
1500 cgraph_edge *next_callee;
1501 gimple call_stmt;
1502 /* Additional information about an indirect call. Not cleared when an edge
1503 becomes direct. */
1504 cgraph_indirect_call_info *indirect_info;
1505 PTR GTY ((skip (""))) aux;
1506 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1507 explanation why function was not inlined. */
1508 enum cgraph_inline_failed_t inline_failed;
1509 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1510 when the function is serialized in. */
1511 unsigned int lto_stmt_uid;
1512 /* Expected frequency of executions within the function.
1513 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1514 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1515 int frequency;
1516 /* Unique id of the edge. */
1517 int uid;
1518 /* Whether this edge was made direct by indirect inlining. */
1519 unsigned int indirect_inlining_edge : 1;
1520 /* Whether this edge describes an indirect call with an undetermined
1521 callee. */
1522 unsigned int indirect_unknown_callee : 1;
1524 /* True if the corresponding CALL stmt cannot be inlined. */
1525 unsigned int call_stmt_cannot_inline_p : 1;
1526 /* Can this call throw externally? */
1527 unsigned int can_throw_external : 1;
1528 /* Edges with the SPECULATIVE flag represent indirect calls that were
1529 speculatively turned into direct calls (e.g. by profile feedback).
1530 The final code sequence will have form:
1531
1532 if (call_target == expected_fn)
1533 expected_fn ();
1534 else
1535 call_target ();
1536
1537 Every speculative call is represented by three components attached
1538 to the same call statement:
1539 1) a direct call (to expected_fn)
1540 2) an indirect call (to call_target)
1541 3) an IPA_REF_ADDR reference to expected_fn.
1542
1543 Optimizers may later redirect the direct call to a clone, so 1) and 3)
1544 do not necessarily agree on the destination. */
1545 unsigned int speculative : 1;
1546 /* Set to true when caller is a constructor or destructor of polymorphic
1547 type. */
1548 unsigned in_polymorphic_cdtor : 1;
1549
1550 private:
1551 /* Remove the edge from the list of the callers of the callee. */
1552 void remove_caller (void);
1553
1554 /* Remove the edge from the list of the callees of the caller. */
1555 void remove_callee (void);
1556 };
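
/* Illustrative usage sketch of the speculative-call representation described
   above (guarded out, not part of the interface): fetch the three components
   of a speculative edge E and drop the speculation when the guessed target
   does not match KNOWN_TARGET.  The helper name and KNOWN_TARGET are
   hypothetical.  */
#if 0
static cgraph_edge *
example_check_speculation (cgraph_edge *e, tree known_target)
{
  cgraph_edge *direct_edge, *indirect_edge;
  ipa_ref *ref;

  /* Fetch the direct call, the indirect call and the IPA_REF_ADDR
     reference that together form the speculative call.  */
  e->speculative_call_info (direct_edge, indirect_edge, ref);

  /* If the speculation went to the wrong declaration, collapse the
     sequence back into a plain call to KNOWN_TARGET.  */
  if (direct_edge->callee->decl != known_target)
    return e->resolve_speculation (known_target);
  return e;
}
#endif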
1557
1558 #define CGRAPH_FREQ_BASE 1000
1559 #define CGRAPH_FREQ_MAX 100000
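
/* Illustrative sketch (guarded out, not part of the interface): FREQUENCY on
   a cgraph_edge is a fixed-point value scaled by CGRAPH_FREQ_BASE, so an edge
   expected to execute 2.5 times per invocation of its caller has frequency
   2500.  The helper below assumes cgraph_node carries the caller's profile
   count in its `count' field.  */
#if 0
static gcov_type
example_estimated_edge_count (cgraph_edge *e)
{
  /* Scale the caller's execution count by the per-invocation frequency.  */
  return e->caller->count * e->frequency / CGRAPH_FREQ_BASE;
}
#endif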
1560
1561 /* The varpool data structure.
1562 Each static variable decl has assigned varpool_node. */
1563
1564 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1565 public:
1566 /* Dump given varpool node to F. */
1567 void dump (FILE *f);
1568
1569 /* Dump given varpool node to stderr. */
1570 void DEBUG_FUNCTION debug (void);
1571
1572 /* Remove variable from symbol table. */
1573 void remove (void);
1574
1575 /* Remove node initializer when it is no longer needed. */
1576 void remove_initializer (void);
1577
/* Analyze the variable. */
1578 void analyze (void);
1579
1580 /* Return variable availability. */
1581 availability get_availability (void);
1582
1583 /* When doing LTO, read variable's constructor from disk if
1584 it is not already present. */
1585 tree get_constructor (void);
1586
1587 /* Return true if variable has constructor that can be used for folding. */
1588 bool ctor_useable_for_folding_p (void);
1589
1590 /* For given variable pool node, walk the alias chain to return the variable
1591 it is an alias of.
1592 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
1593 inline varpool_node *ultimate_alias_target
1594 (availability *availability = NULL);
1595
1596 /* Return node that alias is aliasing. */
1597 inline varpool_node *get_alias_target (void);
1598
1599 /* Output one variable, if necessary. Return whether we output it. */
1600 bool assemble_decl (void);
1601
1602 /* For variables in named sections make sure get_variable_section
1603 is called before we switch to those sections. Then section
1604 conflicts between read-only sections and read-only sections requiring
1605 relocations can be resolved. */
1606 void finalize_named_section_flags (void);
1607
1608 /* Call CALLBACK on the varpool symbol and the aliases associated with it.
1609 When INCLUDE_OVERWRITABLE is false, overwritable aliases are
1610 skipped. */
1611 bool call_for_node_and_aliases (bool (*callback) (varpool_node *, void *),
1612 void *data,
1613 bool include_overwritable);
1614
1615 /* Return true when variable should be considered externally visible. */
1616 bool externally_visible_p (void);
1617
1618 /* Return true when all references to variable must be visible
1619 in ipa_ref_list.
1620 i.e. if the variable is not externally visible or not used in some magic
1621 way (asm statement or such).
1622 The magic uses are all summarized in force_output flag. */
1623 inline bool all_refs_explicit_p ();
1624
1625 /* Return true when variable can be removed from variable pool
1626 if all direct calls are eliminated. */
1627 inline bool can_remove_if_no_refs_p (void);
1628
1629 /* Add the variable DECL to the varpool.
1630 Unlike finalize_decl, this function is intended to be used
1631 by the middle end and allows insertion of new variables at an arbitrary
1632 point of compilation. */
1633 static void add (tree decl);
1634
1635 /* Return varpool node for given symbol and check it is a variable. */
1636 static inline varpool_node *get (const_tree decl);
1637
1638 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1639 instructs the middle end to output the variable to the asm file, if it is
1640 needed or externally visible. */
1641 static void finalize_decl (tree decl);
1642
1643 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1644 Extra name aliases are output whenever DECL is output. */
1645 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1646
1647 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1648 Extra name aliases are output whenever DECL is output. */
1649 static varpool_node * create_alias (tree, tree);
1650
1651 /* Dump the variable pool to F. */
1652 static void dump_varpool (FILE *f);
1653
1654 /* Dump the variable pool to stderr. */
1655 static void DEBUG_FUNCTION debug_varpool (void);
1656
1657 /* Allocate a new varpool node and insert it into basic data structures. */
1658 static varpool_node *create_empty (void);
1659
1660 /* Return varpool node assigned to DECL. Create new one when needed. */
1661 static varpool_node *get_create (tree decl);
1662
1663 /* Given an assembler name, lookup node. */
1664 static varpool_node *get_for_asmname (tree asmname);
1665
1666 /* Set when variable is scheduled to be assembled. */
1667 unsigned output : 1;
1668
1669 /* Set if the variable is dynamically initialized, except for
1670 function local statics. */
1671 unsigned dynamically_initialized : 1;
1672
1673 ENUM_BITFIELD(tls_model) tls_model : 3;
1674
1675 /* Set if the variable is known to be used by a single function only.
1676 This is computed by the ipa_single_use pass and used by late optimizations
1677 in places where the optimization would be valid for a local static variable
1678 if we did not do any inter-procedural code movement. */
1679 unsigned used_by_single_function : 1;
1680
1681 private:
1682 /* Assemble aliases associated with the varpool node. */
1683 void assemble_aliases (void);
1684 };
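
/* Illustrative usage sketch of the varpool API above (guarded out, not part
   of the interface): look up or create the node for DECL, walk its alias
   chain and fetch a constructor that may be used for folding.  The helper
   name is hypothetical and DECL is assumed to be a VAR_DECL.  */
#if 0
static tree
example_constant_value_of (tree decl)
{
  varpool_node *vnode = varpool_node::get_create (decl);
  availability avail = AVAIL_UNSET;

  /* Follow aliases to the node that actually carries the initializer.  */
  varpool_node *target = vnode->ultimate_alias_target (&avail);

  if (target
      && avail >= AVAIL_AVAILABLE
      && target->ctor_useable_for_folding_p ())
    return target->get_constructor ();
  return NULL_TREE;
}
#endif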
1685
1686 /* Every top level asm statement is put into an asm_node. */
1687
1688 struct GTY(()) asm_node {
1689
1691 /* Next asm node. */
1692 asm_node *next;
1693 /* String for this asm node. */
1694 tree asm_str;
1695 /* Ordering of all cgraph nodes. */
1696 int order;
1697 };
1698
1699 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1700
1701 template <>
1702 template <>
1703 inline bool
1704 is_a_helper <cgraph_node *>::test (symtab_node *p)
1705 {
1706 return p && p->type == SYMTAB_FUNCTION;
1707 }
1708
1709 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1710
1711 template <>
1712 template <>
1713 inline bool
1714 is_a_helper <varpool_node *>::test (symtab_node *p)
1715 {
1716 return p && p->type == SYMTAB_VARIABLE;
1717 }
1718
1719 /* Macros to access the next item in the list of free cgraph nodes and
1720 edges. */
1721 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1722 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1723 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1724
1725 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1726 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1727 typedef void (*varpool_node_hook)(varpool_node *, void *);
1728 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1729 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1730
1731 struct cgraph_edge_hook_list;
1732 struct cgraph_node_hook_list;
1733 struct varpool_node_hook_list;
1734 struct cgraph_2edge_hook_list;
1735 struct cgraph_2node_hook_list;
1736
1737 /* Map from a symbol to initialization/finalization priorities. */
1738 struct GTY(()) symbol_priority_map {
1739 priority_type init;
1740 priority_type fini;
1741 };
1742
1743 enum symtab_state
1744 {
1745 /* Frontend is parsing and finalizing functions. */
1746 PARSING,
1747 /* Callgraph is being constructed. It is safe to add new functions. */
1748 CONSTRUCTION,
1749 /* Callgraph is being streamed in or out at LTO time. */
1750 LTO_STREAMING,
1751 /* Callgraph is built and IPA passes are being run. */
1752 IPA,
1753 /* Callgraph is built and all functions are transformed to SSA form. */
1754 IPA_SSA,
1755 /* Functions are now ordered and being passed to RTL expanders. */
1756 EXPANSION,
1757 /* All cgraph expansion is done. */
1758 FINISHED
1759 };
1760
1761 struct asmname_hasher
1762 {
1763 typedef symtab_node *value_type;
1764 typedef const_tree compare_type;
1765 typedef int store_values_directly;
1766
1767 static hashval_t hash (symtab_node *n);
1768 static bool equal (symtab_node *n, const_tree t);
1769 static void ggc_mx (symtab_node *n);
1770 static void pch_nx (symtab_node *&);
1771 static void pch_nx (symtab_node *&, gt_pointer_operator, void *);
1772 static void remove (symtab_node *) {}
1773 };
1774
1775 class GTY((tag ("SYMTAB"))) symbol_table
1776 {
1777 public:
1778 friend class symtab_node;
1779 friend class cgraph_node;
1780 friend class cgraph_edge;
1781
1782 /* Initialize callgraph dump file. */
1783 inline void
1784 initialize (void)
1785 {
1786 if (!dump_file)
1787 dump_file = dump_begin (TDI_cgraph, NULL);
1788 }
1789
1790 /* Register a top-level asm statement ASM_STR. */
1791 inline asm_node *finalize_toplevel_asm (tree asm_str);
1792
1793 /* Analyze the whole compilation unit once it is parsed completely. */
1794 void finalize_compilation_unit (void);
1795
1796 /* The C++ frontend produces same-body aliases all over the place, even before
1797 PCH gets streamed out. It relies on us linking the aliases with their
1798 functions in order to do the fixups, but ipa-ref is not PCH safe.
1799 Consequently we first produce aliases without links, and once the C++ FE is
1800 sure it will not stream out PCH we build the links via this function. */
1801 void process_same_body_aliases (void);
1802
1803 /* Perform simple optimizations based on callgraph. */
1804 void compile (void);
1805
1806 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
1807 functions into callgraph in a way so they look like ordinary reachable
1808 functions inserted into callgraph already at construction time. */
1809 void process_new_functions (void);
1810
1811 /* Once all functions from compilation unit are in memory, produce all clones
1812 and update all calls. We might also do this on demand if we don't want to
1813 bring all functions to memory prior to compilation, but the current WHOPR
1814 implementation does that and it is a bit easier to keep everything right
1815 in this order. */
1816 void materialize_all_clones (void);
1817
1818 /* Register a symbol NODE. */
1819 inline void register_symbol (symtab_node *node);
1820
/* Discard all queued top-level asm statements. */
1821 inline void
1822 clear_asm_symbols (void)
1823 {
1824 asmnodes = NULL;
1825 asm_last_node = NULL;
1826 }
1827
1828 /* Perform reachability analysis and reclaim all unreachable nodes. */
1829 bool remove_unreachable_nodes (bool before_inlining_p, FILE *file);
1830
1831 /* Optimization of function bodies might have rendered some variables
1832 unnecessary, so we want to avoid compiling them. Re-do
1833 reachability starting from variables that are either externally visible
1834 or were referred to from the asm output routines. */
1835 void remove_unreferenced_decls (void);
1836
1837 /* Unregister a symbol NODE. */
1838 inline void unregister (symtab_node *node);
1839
1840 /* Allocate new callgraph node and insert it into basic data structures. */
1841 cgraph_node *create_empty (void);
1842
1843 /* Release a callgraph NODE with UID and put it into the list
1844 of free nodes. */
1845 void release_symbol (cgraph_node *node, int uid);
1846
1847 /* Output all variables enqueued to be assembled. */
1848 bool output_variables (void);
1849
1850 /* Weakrefs may be associated with external decls and thus not output
1851 at expansion time. Emit all necessary aliases. */
1852 void output_weakrefs (void);
1853
1854 /* Return the first symbol in the symbol table. */
1855 inline symtab_node *first_symbol (void);
1856
1857 /* Return first assembler symbol. */
1858 inline asm_node *
1859 first_asm_symbol (void)
1860 {
1861 return asmnodes;
1862 }
1863
1864 /* Return the first symbol with a definition. */
1865 inline symtab_node *first_defined_symbol (void);
1866
1867 /* Return first variable. */
1868 inline varpool_node *first_variable (void);
1869
1870 /* Return next variable after NODE. */
1871 inline varpool_node *next_variable (varpool_node *node);
1872
1873 /* Return first static variable with initializer. */
1874 inline varpool_node *first_static_initializer (void);
1875
1876 /* Return next static variable with initializer after NODE. */
1877 inline varpool_node *next_static_initializer (varpool_node *node);
1878
1879 /* Return first static variable with definition. */
1880 inline varpool_node *first_defined_variable (void);
1881
1882 /* Return next static variable with definition after NODE. */
1883 inline varpool_node *next_defined_variable (varpool_node *node);
1884
1885 /* Return first function with body defined. */
1886 inline cgraph_node *first_defined_function (void);
1887
1888 /* Return next function with body defined after NODE. */
1889 inline cgraph_node *next_defined_function (cgraph_node *node);
1890
1891 /* Return first function. */
1892 inline cgraph_node *first_function (void);
1893
1894 /* Return next function. */
1895 inline cgraph_node *next_function (cgraph_node *node);
1896
1897 /* Return the first function with a gimple body. */
1898 cgraph_node *first_function_with_gimple_body (void);
1899
1900 /* Return the next function with a gimple body after NODE. */
1901 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
1902
1903 /* Register HOOK to be called with DATA on each removed edge. */
1904 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
1905 void *data);
1906
1907 /* Remove ENTRY from the list of hooks called on removing edges. */
1908 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
1909
1910 /* Register HOOK to be called with DATA on each removed node. */
1911 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
1912 void *data);
1913
1914 /* Remove ENTRY from the list of hooks called on removing nodes. */
1915 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
1916
1917 /* Register HOOK to be called with DATA on each removed node. */
1918 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
1919 void *data);
1920
1921 /* Remove ENTRY from the list of hooks called on removing nodes. */
1922 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
1923
1924 /* Register HOOK to be called with DATA on each inserted node. */
1925 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
1926 void *data);
1927
1928 /* Remove ENTRY from the list of hooks called on inserted nodes. */
1929 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
1930
1931 /* Register HOOK to be called with DATA on each inserted node. */
1932 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
1933 void *data);
1934
1935 /* Remove ENTRY from the list of hooks called on inserted nodes. */
1936 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
1937
1938 /* Register HOOK to be called with DATA on each duplicated edge. */
1939 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
1940 void *data);
1941 /* Remove ENTRY from the list of hooks called on duplicating edges. */
1942 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
1943
1944 /* Register HOOK to be called with DATA on each duplicated node. */
1945 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
1946 void *data);
1947
1948 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
1949 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
1950
1951 /* Call all edge removal hooks. */
1952 void call_edge_removal_hooks (cgraph_edge *e);
1953
1954 /* Call all node insertion hooks. */
1955 void call_cgraph_insertion_hooks (cgraph_node *node);
1956
1957 /* Call all node removal hooks. */
1958 void call_cgraph_removal_hooks (cgraph_node *node);
1959
1960 /* Call all node duplication hooks. */
1961 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
1962
1963 /* Call all edge duplication hooks. */
1964 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
1965
1966 /* Call all node removal hooks. */
1967 void call_varpool_removal_hooks (varpool_node *node);
1968
1969 /* Call all node insertion hooks. */
1970 void call_varpool_insertion_hooks (varpool_node *node);
1971
1972 /* Arrange node to be first in its entry of assembler_name_hash. */
1973 void symtab_prevail_in_asm_name_hash (symtab_node *node);
1974
1975 /* Initialize the asm name hash unless it is already initialized. */
1976 void symtab_initialize_asm_name_hash (void);
1977
1978 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
1979 void change_decl_assembler_name (tree decl, tree name);
1980
1981 int cgraph_count;
1982 int cgraph_max_uid;
1983
1984 int edges_count;
1985 int edges_max_uid;
1986
1987 symtab_node* GTY(()) nodes;
1988 asm_node* GTY(()) asmnodes;
1989 asm_node* GTY(()) asm_last_node;
1990 cgraph_node* GTY(()) free_nodes;
1991
1992 /* Head of a linked list of unused (freed) call graph edges.
1993 Do not GTY((delete)) this list so UIDs get reliably recycled. */
1994 cgraph_edge * GTY(()) free_edges;
1995
1996 /* The order index of the next symtab node to be created. This is
1997 used so that we can sort the cgraph nodes in order by when we saw
1998 them, to support -fno-toplevel-reorder. */
1999 int order;
2000
2001 /* Set when whole unit has been analyzed so we can access global info. */
2002 bool global_info_ready;
2003 /* What state callgraph is in right now. */
2004 enum symtab_state state;
2005 /* Set when the cgraph is fully built and the basic flags are computed. */
2006 bool function_flags_ready;
2007
2008 bool cpp_implicit_aliases_done;
2009
2010 /* Hash table used to hold sections. */
2011 hash_table<section_name_hasher> *GTY(()) section_hash;
2012
2013 /* Hash table used to convert assembler names into nodes. */
2014 hash_table<asmname_hasher> *assembler_name_hash;
2015
2016 /* Hash table used to hold init priorities. */
2017 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2018
2019 FILE* GTY ((skip)) dump_file;
2020
2021 private:
2022 /* Allocate new callgraph node. */
2023 inline cgraph_node * allocate_cgraph_symbol (void);
2024
2025 /* Allocate a cgraph_edge structure and fill it with data according to the
2026 parameters of which only CALLEE can be NULL (when creating an indirect call
2027 edge). */
2028 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2029 gimple call_stmt, gcov_type count, int freq,
2030 bool indir_unknown_callee);
2031
2032 /* Put the edge onto the free list. */
2033 void free_edge (cgraph_edge *e);
2034
2035 /* Insert NODE to assembler name hash. */
2036 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2037
2038 /* Remove NODE from assembler name hash. */
2039 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2040
2041 /* Hash asmnames ignoring the user specified marks. */
2042 static hashval_t decl_assembler_name_hash (const_tree asmname);
2043
2044 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2045 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2046
2047 friend struct asmname_hasher;
2048
2049 /* List of hooks triggered when an edge is removed. */
2050 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2051 /* List of hooks triggered when a cgraph node is removed. */
2052 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2053 /* List of hooks triggered when an edge is duplicated. */
2054 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2055 /* List of hooks triggered when a node is duplicated. */
2056 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2057 /* List of hooks triggered when a function is inserted. */
2058 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2059 /* List of hooks triggered when a variable is inserted. */
2060 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2061 /* List of hooks triggered when a node is removed. */
2062 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2063 };
2064
2065 extern GTY(()) symbol_table *symtab;
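
/* Illustrative sketch of the hook machinery declared in symbol_table above
   (guarded out, not part of the interface): register a callback run for every
   removed cgraph node, then unregister it.  The callback, counter and helper
   names are hypothetical.  */
#if 0
static void
example_note_removal (cgraph_node *node, void *data)
{
  /* Count removed nodes that had a definition in this unit.  */
  int *counter = (int *) data;
  if (node->definition)
    (*counter)++;
}

static void
example_use_removal_hook (void)
{
  static int removed_defs;
  cgraph_node_hook_list *entry
    = symtab->add_cgraph_removal_hook (example_note_removal, &removed_defs);

  /* ... run passes that may remove nodes ... */

  symtab->remove_cgraph_removal_hook (entry);
}
#endif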
2066
2067 extern vec<cgraph_node *> cgraph_new_nodes;
2068
2069 inline hashval_t
2070 asmname_hasher::hash (symtab_node *n)
2071 {
2072 return symbol_table::decl_assembler_name_hash
2073 (DECL_ASSEMBLER_NAME (n->decl));
2074 }
2075
2076 inline bool
2077 asmname_hasher::equal (symtab_node *n, const_tree t)
2078 {
2079 return symbol_table::decl_assembler_name_equal (n->decl, t);
2080 }
2081
2082 extern void gt_ggc_mx (symtab_node *&);
2083
2084 inline void
2085 asmname_hasher::ggc_mx (symtab_node *n)
2086 {
2087 gt_ggc_mx (n);
2088 }
2089
2090 extern void gt_pch_nx (symtab_node *&);
2091
2092 inline void
2093 asmname_hasher::pch_nx (symtab_node *&n)
2094 {
2095 gt_pch_nx (n);
2096 }
2097
2098 inline void
2099 asmname_hasher::pch_nx (symtab_node *&n, gt_pointer_operator op, void *cookie)
2100 {
2101 op (&n, cookie);
2102 }
2103
2104 /* In cgraph.c */
2105 void release_function_body (tree);
2106 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2107
2108 void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
2109 bool cgraph_function_possibly_inlined_p (tree);
2110
2111 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2112 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2113
2114 bool resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution);
2115 extern bool gimple_check_call_matching_types (gimple, tree, bool);
2116
2117 /* In cgraphunit.c */
2118 /* Initialize datastructures so DECL is a function in lowered gimple form.
2119 IN_SSA is true if the gimple is in SSA. */
2120 basic_block init_lowered_empty_function (tree, bool);
2121
2122 /* In cgraphclones.c */
2123
2124 tree clone_function_name (tree decl, const char *);
2125
2126 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2127 bool, bitmap, bool, bitmap, basic_block);
2128
2129 /* In cgraphbuild.c */
2130 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2131 void record_references_in_initializer (tree, bool);
2132
2133 /* In ipa.c */
2134 void cgraph_build_static_cdtor (char which, tree body, int priority);
2135 void ipa_discover_readonly_nonaddressable_vars (void);
2136
2137 /* In varpool.c */
2138 tree ctor_for_folding (tree);
2139
2140 /* Return true when the symbol is a real symbol, i.e. it is not an inline
2141 clone or an abstract function kept for debug info purposes only. */
2142 inline bool
2143 symtab_node::real_symbol_p (void)
2144 {
2145 cgraph_node *cnode;
2146
2147 if (DECL_ABSTRACT_P (decl))
2148 return false;
2149 if (!is_a <cgraph_node *> (this))
2150 return true;
2151 cnode = dyn_cast <cgraph_node *> (this);
2152 if (cnode->global.inlined_to)
2153 return false;
2154 return true;
2155 }
2156
2157 /* Return true if DECL should have entry in symbol table if used.
2158 Those are functions and static & external variables. */
2159
2160 static inline bool
2161 decl_in_symtab_p (const_tree decl)
2162 {
2163 return (TREE_CODE (decl) == FUNCTION_DECL
2164 || (TREE_CODE (decl) == VAR_DECL
2165 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2166 }
2167
2168 inline bool
2169 symtab_node::in_same_comdat_group_p (symtab_node *target)
2170 {
2171 symtab_node *source = this;
2172
2173 if (cgraph_node *cn = dyn_cast <cgraph_node *> (source))
2174 {
2175 if (cn->global.inlined_to)
2176 source = cn->global.inlined_to;
2177 }
2178 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2179 {
2180 if (cn->global.inlined_to)
2181 target = cn->global.inlined_to;
2182 }
2183
2184 return source->get_comdat_group () == target->get_comdat_group ();
2185 }
2186
2187 /* Return node that alias is aliasing. */
2188
2189 inline symtab_node *
2190 symtab_node::get_alias_target (void)
2191 {
2192 ipa_ref *ref = NULL;
2193 iterate_reference (0, ref);
2194 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2195 return ref->referred;
2196 }
2197
2198 /* Return the next symbol with a definition after this node. */
2199 inline symtab_node *
2200 symtab_node::next_defined_symbol (void)
2201 {
2202 symtab_node *node1 = next;
2203
2204 for (; node1; node1 = node1->next)
2205 if (node1->definition)
2206 return node1;
2207
2208 return NULL;
2209 }
2210
2211 /* Return varpool node for given symbol and check it is a variable. */
2212
2213 inline varpool_node *
2214 varpool_node::get (const_tree decl)
2215 {
2216 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2217 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2218 }
2219
2220 /* Register a symbol NODE. */
2221
2222 inline void
2223 symbol_table::register_symbol (symtab_node *node)
2224 {
2225 node->next = nodes;
2226 node->previous = NULL;
2227
2228 if (nodes)
2229 nodes->previous = node;
2230 nodes = node;
2231
2232 node->order = order++;
2233 }
2234
2235 /* Register a top-level asm statement ASM_STR. */
2236
2237 asm_node *
2238 symbol_table::finalize_toplevel_asm (tree asm_str)
2239 {
2240 asm_node *node;
2241
2242 node = ggc_cleared_alloc<asm_node> ();
2243 node->asm_str = asm_str;
2244 node->order = order++;
2245 node->next = NULL;
2246
2247 if (asmnodes == NULL)
2248 asmnodes = node;
2249 else
2250 asm_last_node->next = node;
2251
2252 asm_last_node = node;
2253 return node;
2254 }
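
/* Illustrative sketch (guarded out, not part of the interface): how a front
   end could register a raw top-level asm statement.  The helper name and the
   asm text are hypothetical, and the use of build_string from the tree API is
   only an assumption about a convenient way to obtain a STRING_CST.  */
#if 0
static void
example_register_asm (void)
{
  /* Wrap the literal in a STRING_CST and queue it for output.  */
  tree str = build_string (sizeof (".globl example_sym") - 1,
			   ".globl example_sym");
  symtab->finalize_toplevel_asm (str);
}
#endif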
2255
2256 /* Unregister a symbol NODE. */
2257 inline void
2258 symbol_table::unregister (symtab_node *node)
2259 {
2260 if (node->previous)
2261 node->previous->next = node->next;
2262 else
2263 nodes = node->next;
2264
2265 if (node->next)
2266 node->next->previous = node->previous;
2267
2268 node->next = NULL;
2269 node->previous = NULL;
2270 }
2271
2272 /* Allocate new callgraph node and insert it into basic data structures. */
2273
2274 inline cgraph_node *
2275 symbol_table::create_empty (void)
2276 {
2277 cgraph_node *node = allocate_cgraph_symbol ();
2278
2279 node->type = SYMTAB_FUNCTION;
2280 node->frequency = NODE_FREQUENCY_NORMAL;
2281 node->count_materialization_scale = REG_BR_PROB_BASE;
2282 cgraph_count++;
2283
2284 return node;
2285 }
2286
2287 /* Release a callgraph NODE with UID and put it into the list of free nodes. */
2288
2289 inline void
2290 symbol_table::release_symbol (cgraph_node *node, int uid)
2291 {
2292 cgraph_count--;
2293
2294 /* Clear out the node to NULL all pointers and add the node to the free
2295 list. */
2296 memset (node, 0, sizeof (*node));
2297 node->type = SYMTAB_FUNCTION;
2298 node->uid = uid;
2299 SET_NEXT_FREE_NODE (node, free_nodes);
2300 free_nodes = node;
2301 }
2302
2303 /* Allocate new callgraph node. */
2304
2305 inline cgraph_node *
2306 symbol_table::allocate_cgraph_symbol (void)
2307 {
2308 cgraph_node *node;
2309
2310 if (free_nodes)
2311 {
2312 node = free_nodes;
2313 free_nodes = NEXT_FREE_NODE (node);
2314 }
2315 else
2316 {
2317 node = ggc_cleared_alloc<cgraph_node> ();
2318 node->uid = cgraph_max_uid++;
2319 }
2320
2321 return node;
2322 }
2323
2324
2325 /* Return the first symbol in the symbol table. */
2326 inline symtab_node *
2327 symbol_table::first_symbol (void)
2328 {
2329 return nodes;
2330 }
2331
2332 /* Walk all symbols. */
2333 #define FOR_EACH_SYMBOL(node) \
2334 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2335
2336 /* Return the first symbol with a definition. */
2337 inline symtab_node *
2338 symbol_table::first_defined_symbol (void)
2339 {
2340 symtab_node *node;
2341
2342 for (node = nodes; node; node = node->next)
2343 if (node->definition)
2344 return node;
2345
2346 return NULL;
2347 }
2348
2349 /* Walk all symbols with definitions in current unit. */
2350 #define FOR_EACH_DEFINED_SYMBOL(node) \
2351 for ((node) = symtab->first_defined_symbol (); (node); \
2352 (node) = node->next_defined_symbol ())
2353
2354 /* Return first variable. */
2355 inline varpool_node *
2356 symbol_table::first_variable (void)
2357 {
2358 symtab_node *node;
2359 for (node = nodes; node; node = node->next)
2360 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2361 return vnode;
2362 return NULL;
2363 }
2364
2365 /* Return next variable after NODE. */
2366 inline varpool_node *
2367 symbol_table::next_variable (varpool_node *node)
2368 {
2369 symtab_node *node1 = node->next;
2370 for (; node1; node1 = node1->next)
2371 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2372 return vnode1;
2373 return NULL;
2374 }
2375 /* Walk all variables. */
2376 #define FOR_EACH_VARIABLE(node) \
2377 for ((node) = symtab->first_variable (); \
2378 (node); \
2379 (node) = symtab->next_variable ((node)))
2380
2381 /* Return first static variable with initializer. */
2382 inline varpool_node *
2383 symbol_table::first_static_initializer (void)
2384 {
2385 symtab_node *node;
2386 for (node = nodes; node; node = node->next)
2387 {
2388 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2389 if (vnode && DECL_INITIAL (node->decl))
2390 return vnode;
2391 }
2392 return NULL;
2393 }
2394
2395 /* Return next static variable with initializer after NODE. */
2396 inline varpool_node *
2397 symbol_table::next_static_initializer (varpool_node *node)
2398 {
2399 symtab_node *node1 = node->next;
2400 for (; node1; node1 = node1->next)
2401 {
2402 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2403 if (vnode1 && DECL_INITIAL (node1->decl))
2404 return vnode1;
2405 }
2406 return NULL;
2407 }
2408
2409 /* Walk all static variables with initializer set. */
2410 #define FOR_EACH_STATIC_INITIALIZER(node) \
2411 for ((node) = symtab->first_static_initializer (); (node); \
2412 (node) = symtab->next_static_initializer (node))
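
/* Illustrative sketch (guarded out, not part of the interface): using the
   iterator above to count static initializers whose constructors may be used
   for folding.  The helper name is hypothetical.  */
#if 0
static int
example_count_foldable_initializers (void)
{
  varpool_node *vnode;
  int n_foldable = 0;

  FOR_EACH_STATIC_INITIALIZER (vnode)
    if (vnode->ctor_useable_for_folding_p ())
      n_foldable++;
  return n_foldable;
}
#endif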
2413
2414 /* Return first static variable with definition. */
2415 inline varpool_node *
2416 symbol_table::first_defined_variable (void)
2417 {
2418 symtab_node *node;
2419 for (node = nodes; node; node = node->next)
2420 {
2421 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2422 if (vnode && vnode->definition)
2423 return vnode;
2424 }
2425 return NULL;
2426 }
2427
2428 /* Return next static variable with definition after NODE. */
2429 inline varpool_node *
2430 symbol_table::next_defined_variable (varpool_node *node)
2431 {
2432 symtab_node *node1 = node->next;
2433 for (; node1; node1 = node1->next)
2434 {
2435 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2436 if (vnode1 && vnode1->definition)
2437 return vnode1;
2438 }
2439 return NULL;
2440 }
2441 /* Walk all variables with definitions in current unit. */
2442 #define FOR_EACH_DEFINED_VARIABLE(node) \
2443 for ((node) = symtab->first_defined_variable (); (node); \
2444 (node) = symtab->next_defined_variable (node))
2445
2446 /* Return first function with body defined. */
2447 inline cgraph_node *
2448 symbol_table::first_defined_function (void)
2449 {
2450 symtab_node *node;
2451 for (node = nodes; node; node = node->next)
2452 {
2453 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2454 if (cn && cn->definition)
2455 return cn;
2456 }
2457 return NULL;
2458 }
2459
2460 /* Return next function with body defined after NODE. */
2461 inline cgraph_node *
2462 symbol_table::next_defined_function (cgraph_node *node)
2463 {
2464 symtab_node *node1 = node->next;
2465 for (; node1; node1 = node1->next)
2466 {
2467 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2468 if (cn1 && cn1->definition)
2469 return cn1;
2470 }
2471 return NULL;
2472 }
2473
2474 /* Walk all functions with body defined. */
2475 #define FOR_EACH_DEFINED_FUNCTION(node) \
2476 for ((node) = symtab->first_defined_function (); (node); \
2477 (node) = symtab->next_defined_function ((node)))
2478
2479 /* Return first function. */
2480 inline cgraph_node *
2481 symbol_table::first_function (void)
2482 {
2483 symtab_node *node;
2484 for (node = nodes; node; node = node->next)
2485 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2486 return cn;
2487 return NULL;
2488 }
2489
2490 /* Return next function. */
2491 inline cgraph_node *
2492 symbol_table::next_function (cgraph_node *node)
2493 {
2494 symtab_node *node1 = node->next;
2495 for (; node1; node1 = node1->next)
2496 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2497 return cn1;
2498 return NULL;
2499 }
2500
2501 /* Return the first function with a gimple body. */
2502 inline cgraph_node *
2503 symbol_table::first_function_with_gimple_body (void)
2504 {
2505 symtab_node *node;
2506 for (node = nodes; node; node = node->next)
2507 {
2508 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2509 if (cn && cn->has_gimple_body_p ())
2510 return cn;
2511 }
2512 return NULL;
2513 }
2514
2515 /* Return the next function with a gimple body after NODE. */
2516 inline cgraph_node *
2517 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2518 {
2519 symtab_node *node1 = node->next;
2520 for (; node1; node1 = node1->next)
2521 {
2522 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2523 if (cn1 && cn1->has_gimple_body_p ())
2524 return cn1;
2525 }
2526 return NULL;
2527 }
2528
2529 /* Walk all functions. */
2530 #define FOR_EACH_FUNCTION(node) \
2531 for ((node) = symtab->first_function (); (node); \
2532 (node) = symtab->next_function ((node)))
2533
2534 /* Return true when callgraph node is a function with Gimple body defined
2535 in current unit. Functions can also be defined externally or they
2536 can be thunks with no Gimple representation.
2537
2538 Note that at WPA stage, the function body may not be present in memory. */
2539
2540 inline bool
2541 cgraph_node::has_gimple_body_p (void)
2542 {
2543 return definition && !thunk.thunk_p && !alias;
2544 }
2545
2546 /* Walk all functions with body defined. */
2547 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2548 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2549 (node) = symtab->next_function_with_gimple_body (node))
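
/* Illustrative sketch (guarded out, not part of the interface): the typical
   shape of a simple IPA walk over every function with a gimple body, here
   just counting direct call edges.  It assumes the usual `callees' list
   hanging off cgraph_node; the helper name is hypothetical.  */
#if 0
static int
example_count_direct_calls (void)
{
  cgraph_node *node;
  int ncalls = 0;

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    for (cgraph_edge *e = node->callees; e; e = e->next_callee)
      ncalls++;
  return ncalls;
}
#endif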
2550
2551 /* Create a new static variable of type TYPE. */
2552 tree add_new_static_var (tree type);
2553
2554 /* Uniquize all constants that appear in memory.
2555 Each constant in memory thus far output is recorded
2556 in `const_desc_table'. */
2557
2558 struct GTY((for_user)) constant_descriptor_tree {
2559 /* A MEM for the constant. */
2560 rtx rtl;
2561
2562 /* The value of the constant. */
2563 tree value;
2564
2565 /* Hash of value. Computing the hash from value each time
2566 hashfn is called can't work properly, as that means recursive
2567 use of the hash table during hash table expansion. */
2568 hashval_t hash;
2569 };
2570
2571 /* Return true when the function is only called directly or it has an alias,
2572 i.e. it is not externally visible, its address is not taken and
2573 it is not used in any other non-standard way. */
2574
2575 inline bool
2576 cgraph_node::only_called_directly_or_aliased_p (void)
2577 {
2578 gcc_assert (!global.inlined_to);
2579 return (!force_output && !address_taken
2580 && !used_from_other_partition
2581 && !DECL_VIRTUAL_P (decl)
2582 && !DECL_STATIC_CONSTRUCTOR (decl)
2583 && !DECL_STATIC_DESTRUCTOR (decl)
2584 && !externally_visible);
2585 }
2586
2587 /* Return true when variable can be removed from variable pool
2588 if all direct calls are eliminated. */
2589
2590 inline bool
2591 varpool_node::can_remove_if_no_refs_p (void)
2592 {
2593 if (DECL_EXTERNAL (decl))
2594 return true;
2595 return (!force_output && !used_from_other_partition
2596 && ((DECL_COMDAT (decl)
2597 && !forced_by_abi
2598 && !used_from_object_file_p ())
2599 || !externally_visible
2600 || DECL_HAS_VALUE_EXPR_P (decl)));
2601 }
2602
2603 /* Return true when all references to variable must be visible in ipa_ref_list.
2604 i.e. if the variable is not externally visible or not used in some magic
2605 way (asm statement or such).
2606 The magic uses are all summarized in force_output flag. */
2607
2608 inline bool
2609 varpool_node::all_refs_explicit_p ()
2610 {
2611 return (definition
2612 && !externally_visible
2613 && !used_from_other_partition
2614 && !force_output);
2615 }
2616
2617 struct tree_descriptor_hasher : ggc_hasher<constant_descriptor_tree *>
2618 {
2619 static hashval_t hash (constant_descriptor_tree *);
2620 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2621 };
2622
2623 /* Constant pool accessor function. */
2624 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2625
2626 /* Return node that alias is aliasing. */
2627
2628 inline cgraph_node *
2629 cgraph_node::get_alias_target (void)
2630 {
2631 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2632 }
2633
2634 /* Return node that alias is aliasing. */
2635
2636 inline varpool_node *
2637 varpool_node::get_alias_target (void)
2638 {
2639 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2640 }
2641
2642 /* Given a function symbol, walk the alias chain to return the function node
2643 it is an alias of. Do not walk through thunks.
2644 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2645
2646 inline cgraph_node *
2647 cgraph_node::ultimate_alias_target (enum availability *availability)
2648 {
2649 cgraph_node *n = dyn_cast <cgraph_node *> (symtab_node::ultimate_alias_target
2650 (availability));
2651 if (!n && availability)
2652 *availability = AVAIL_NOT_AVAILABLE;
2653 return n;
2654 }
2655
2656 /* For given variable pool node, walk the alias chain to return the variable
2657 it is an alias of.
2658 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2659
2660 inline varpool_node *
2661 varpool_node::ultimate_alias_target (availability *availability)
2662 {
2663 varpool_node *n = dyn_cast <varpool_node *>
2664 (symtab_node::ultimate_alias_target (availability));
2665
2666 if (!n && availability)
2667 *availability = AVAIL_NOT_AVAILABLE;
2668 return n;
2669 }
2670
2671 /* Return true when the edge represents a direct recursion. */
2672 inline bool
2673 cgraph_edge::recursive_p (void)
2674 {
2675 cgraph_node *c = callee->ultimate_alias_target ();
2676 if (caller->global.inlined_to)
2677 return caller->global.inlined_to->decl == c->decl;
2678 else
2679 return caller->decl == c->decl;
2680 }
2681
2682 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
2683 static inline bool
2684 decl_is_tm_clone (const_tree fndecl)
2685 {
2686 cgraph_node *n = cgraph_node::get (fndecl);
2687 if (n)
2688 return n->tm_clone;
2689 return false;
2690 }
2691
2692 /* Indicate that the node is needed, i.e. reachable via some
2693 external means. */
2694
2695 inline void
2696 cgraph_node::mark_force_output (void)
2697 {
2698 force_output = 1;
2699 gcc_checking_assert (!global.inlined_to);
2700 }
2701
2702 inline symtab_node * symtab_node::get_create (tree node)
2703 {
2704 if (TREE_CODE (node) == VAR_DECL)
2705 return varpool_node::get_create (node);
2706 else
2707 return cgraph_node::get_create (node);
2708 }
2709
2710 /* Build polymorphic call context for indirect call E. */
2711
2712 inline
2713 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
2714 {
2715 gcc_checking_assert (e->indirect_info->polymorphic);
2716 *this = e->indirect_info->context;
2717 }
2718
2719 /* Build empty "I know nothing" context. */
2720
2721 inline
2722 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
2723 {
2724 clear_speculation ();
2725 clear_outer_type ();
2726 invalid = false;
2727 }
2728
2729 /* Make context non-speculative. */
2730
2731 inline void
2732 ipa_polymorphic_call_context::clear_speculation ()
2733 {
2734 speculative_outer_type = NULL;
2735 speculative_offset = 0;
2736 speculative_maybe_derived_type = false;
2737 }
2738
2739 /* Produce a context specifying all derived types of OTR_TYPE.
2740 If OTR_TYPE is NULL or the type of the OBJ_TYPE_REF, the context is set
2741 to the dummy "I know nothing" setting. */
2742
2743 inline void
2744 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
2745 {
2746 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
2747 offset = 0;
2748 maybe_derived_type = true;
2749 maybe_in_construction = true;
2750 dynamic = true;
2751 }
2752
2753 /* Adjust all offsets in contexts by OFF bits. */
2754
2755 inline void
2756 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
2757 {
2758 if (outer_type)
2759 offset += off;
2760 if (speculative_outer_type)
2761 speculative_offset += off;
2762 }
2763
2764 /* Return TRUE if context is fully useless. */
2765
2766 inline bool
2767 ipa_polymorphic_call_context::useless_p () const
2768 {
2769 return (!outer_type && !speculative_outer_type);
2770 }
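
/* Illustrative sketch (guarded out, not part of the interface): building a
   polymorphic call context for an indirect edge and keeping it meaningful
   after the object pointer has been adjusted.  OFF is a hypothetical bit
   offset, as is the helper name.  */
#if 0
static bool
example_adjusted_context_useful_p (cgraph_edge *e, HOST_WIDE_INT off)
{
  if (!e->indirect_info || !e->indirect_info->polymorphic)
    return false;

  /* Start from the context recorded on the edge and shift it by OFF bits.  */
  ipa_polymorphic_call_context ctx (e);
  ctx.offset_by (off);

  /* Fall back to the "know nothing" context for the call's OTR type
     when nothing useful is left.  */
  if (ctx.useless_p ())
    ctx.clear_outer_type (e->indirect_info->otr_type);
  return !ctx.useless_p ();
}
#endif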
2771 #endif /* GCC_CGRAPH_H */