1 /* Callgraph handling code.
2 Copyright (C) 2003-2014 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23
24 #include "hash-map.h"
25 #include "is-a.h"
26 #include "plugin-api.h"
27 #include "vec.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "ipa-ref.h"
31 #include "dumpfile.h"
32
33 /* Symbol table consists of functions and variables.
34 TODO: add labels and CONST_DECLs. */
35 enum symtab_type
36 {
37 SYMTAB_SYMBOL,
38 SYMTAB_FUNCTION,
39 SYMTAB_VARIABLE
40 };
41
42 /* Section names are stored as reference-counted strings in a GGC-safe hash table
43    (to make them survive through PCH). */
44
45 struct GTY((for_user)) section_hash_entry_d
46 {
47 int ref_count;
48   char *name; /* As long as this data structure stays in GGC, we cannot put
49                  the string at the tail of the structure or GGC dies in a horrible
50                  way. */
51 };
52
53 typedef struct section_hash_entry_d section_hash_entry;
54
55 struct section_name_hasher : ggc_hasher<section_hash_entry *>
56 {
57 typedef const char *compare_type;
58
59 static hashval_t hash (section_hash_entry *);
60 static bool equal (section_hash_entry *, const char *);
61 };
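
/* Illustrative sketch (editorial addition, not from this header): a plausible
   implementation of the hasher interface declared above, hashing and comparing
   entries by their NAME string.  The real definitions live elsewhere in GCC;
   this only shows the expected contract of hash () and equal ().

     hashval_t
     section_name_hasher::hash (section_hash_entry *n)
     {
       return htab_hash_string (n->name);
     }

     bool
     section_name_hasher::equal (section_hash_entry *n, const char *name)
     {
       return n->name == name || !strcmp (n->name, name);
     }
*/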
62
63 enum availability
64 {
65 /* Not yet set by cgraph_function_body_availability. */
66 AVAIL_UNSET,
67 /* Function body/variable initializer is unknown. */
68 AVAIL_NOT_AVAILABLE,
69 /* Function body/variable initializer is known but might be replaced
70 by a different one from other compilation unit and thus needs to
71      be dealt with care. Like AVAIL_NOT_AVAILABLE it can have
72 arbitrary side effects on escaping variables and functions, while
73 like AVAILABLE it might access static variables. */
74 AVAIL_INTERPOSABLE,
75 /* Function body/variable initializer is known and will be used in final
76 program. */
77 AVAIL_AVAILABLE,
78   /* Function body/variable initializer is known and all its uses are
79      explicitly visible within current unit (i.e. its address is never taken and
80      it is not exported to other units). Currently used only for functions. */
81 AVAIL_LOCAL
82 };
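
/* Usage sketch (editorial addition, not from this header): the typical way
   these values are consumed -- resolve aliases via ultimate_alias_target
   (declared further below) and only trust a definition that cannot be
   interposed.  NODE and use_known_definition are hypothetical names.

     enum availability avail;
     symtab_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       use_known_definition (target);
*/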
83
84 /* Classification of symbols WRT partitioning. */
85 enum symbol_partitioning_class
86 {
87 /* External declarations are ignored by partitioning algorithms and they are
88 added into the boundary later via compute_ltrans_boundary. */
89 SYMBOL_EXTERNAL,
90 /* Partitioned symbols are put into one of the partitions. */
91 SYMBOL_PARTITION,
92 /* Duplicated symbols (such as comdat or constant pool references) are
93 copied into every node needing them via add_symbol_to_partition. */
94 SYMBOL_DUPLICATE
95 };
96
97 /* Base of all entries in the symbol table.
98    The symtab_node is inherited by cgraph and varpool nodes. */
99 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
100 chain_next ("%h.next"), chain_prev ("%h.previous")))
101 symtab_node
102 {
103 public:
104 /* Return name. */
105 const char *name () const;
106
107 /* Return asm name. */
108 const char * asm_name () const;
109
110 /* Add node into symbol table. This function is not used directly, but via
111 cgraph/varpool node creation routines. */
112 void register_symbol (void);
113
114 /* Remove symbol from symbol table. */
115 void remove (void);
116
117 /* Dump symtab node to F. */
118 void dump (FILE *f);
119
120 /* Dump symtab node to stderr. */
121 void DEBUG_FUNCTION debug (void);
122
123 /* Verify consistency of node. */
124 void DEBUG_FUNCTION verify (void);
125
126   /* Return an ipa reference from this symtab_node to
127      REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
128      of the use. */
129 ipa_ref *create_reference (symtab_node *referred_node,
130 enum ipa_ref_use use_type);
131
132   /* Return an ipa reference from this symtab_node to
133      REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
134      of the use and STMT the statement (if it exists). */
135 ipa_ref *create_reference (symtab_node *referred_node,
136 enum ipa_ref_use use_type, gimple stmt);
137
138 /* If VAL is a reference to a function or a variable, add a reference from
139      this symtab_node to the corresponding symbol table node. USE_TYPE specifies
140      the type of the use and STMT the statement (if it exists). Return the new
141 reference or NULL if none was created. */
142 ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
143 gimple stmt);
144
145 /* Clone all references from symtab NODE to this symtab_node. */
146 void clone_references (symtab_node *node);
147
148   /* Clone all referring from symtab NODE to this symtab_node. */
152   void clone_referring (symtab_node *node);
153
154 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
155 ipa_ref *clone_reference (ipa_ref *ref, gimple stmt);
156
157 /* Find the structure describing a reference to REFERRED_NODE
158 and associated with statement STMT. */
159 ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
160 unsigned int lto_stmt_uid);
161
162 /* Remove all references that are associated with statement STMT. */
163 void remove_stmt_references (gimple stmt);
164
165 /* Remove all stmt references in non-speculative references.
166      Those are not maintained during inlining & cloning.
167      The exceptions are speculative references that are updated along
168 with callgraph edges associated with them. */
169 void clear_stmts_in_references (void);
170
171 /* Remove all references in ref list. */
172 void remove_all_references (void);
173
174 /* Remove all referring items in ref list. */
175 void remove_all_referring (void);
176
177 /* Dump references in ref list to FILE. */
178 void dump_references (FILE *file);
179
180 /* Dump referring in list to FILE. */
181 void dump_referring (FILE *);
182
183 /* Iterates I-th reference in the list, REF is also set. */
184 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
185
186 /* Iterates I-th referring item in the list, REF is also set. */
187 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
188
189 /* Iterates I-th referring alias item in the list, REF is also set. */
190 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
191
192   /* Return true if this symtab node and TARGET represent
193      semantically equivalent symbols. */
194 bool semantically_equivalent_p (symtab_node *target);
195
196 /* Classify symbol symtab node for partitioning. */
197 enum symbol_partitioning_class get_partitioning_class (void);
198
199 /* Return comdat group. */
200 tree get_comdat_group ()
201 {
202 return x_comdat_group;
203 }
204
205 /* Return comdat group as identifier_node. */
206 tree get_comdat_group_id ()
207 {
208 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
209 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
210 return x_comdat_group;
211 }
212
213 /* Set comdat group. */
214 void set_comdat_group (tree group)
215 {
216 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
217 || DECL_P (group));
218 x_comdat_group = group;
219 }
220
221 /* Return section as string. */
222 const char * get_section ()
223 {
224 if (!x_section)
225 return NULL;
226 return x_section->name;
227 }
228
229 /* Remove node from same comdat group. */
230 void remove_from_same_comdat_group (void);
231
232 /* Add this symtab_node to the same comdat group that OLD is in. */
233 void add_to_same_comdat_group (symtab_node *old_node);
234
235 /* Dissolve the same_comdat_group list in which NODE resides. */
236 void dissolve_same_comdat_group_list (void);
237
238 /* Return true when symtab_node is known to be used from other (non-LTO)
239 object file. Known only when doing LTO via linker plugin. */
240 bool used_from_object_file_p (void);
241
242 /* Walk the alias chain to return the symbol NODE is alias of.
243 If NODE is not an alias, return NODE.
244 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
245 symtab_node *ultimate_alias_target (enum availability *avail = NULL);
246
247 /* Return next reachable static symbol with initializer after NODE. */
248 inline symtab_node *next_defined_symbol (void);
249
250 /* Add reference recording that symtab node is alias of TARGET.
251 The function can fail in the case of aliasing cycles; in this case
252 it returns false. */
253 bool resolve_alias (symtab_node *target);
254
255   /* The C++ FE sometimes changes linkage flags after producing same
256 body aliases. */
257 void fixup_same_cpp_alias_visibility (symtab_node *target);
258
259   /* Call callback on symtab node and aliases associated with this node.
260 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
261 skipped. */
262 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
263 void *data,
264 bool include_overwrite);
265
266   /* If the node cannot be interposed by static or dynamic linker to point to
267      a different definition, return this symbol. Otherwise look for an alias with
268      such property and if none exists, introduce a new one. */
269 symtab_node *noninterposable_alias (void);
270
271 /* Return node that alias is aliasing. */
272 inline symtab_node *get_alias_target (void);
273
274 /* Set section for symbol and its aliases. */
275 void set_section (const char *section);
276
277 /* Set section, do not recurse into aliases.
278 When one wants to change section of symbol and its aliases,
279 use set_section. */
280 void set_section_for_node (const char *section);
281
282 /* Set initialization priority to PRIORITY. */
283 void set_init_priority (priority_type priority);
284
285 /* Return the initialization priority. */
286 priority_type get_init_priority ();
287
288 /* Return availability of NODE. */
289 enum availability get_availability (void);
290
291 /* Make DECL local. */
292 void make_decl_local (void);
293
294 /* Return true if list contains an alias. */
295 bool has_aliases_p (void);
296
297   /* Return true when the symbol is a real symbol, i.e. it is not an inline clone
298      or an abstract function kept for debug info purposes only. */
299 bool real_symbol_p (void);
300
301   /* Determine if the symbol declaration is needed. That is, it is visible to
302      something outside this translation unit or to something magic in the system
303      configury. This function is used just during symbol creation. */
304 bool needed_p (void);
305
306 /* Return true when there are references to the node. */
307 bool referred_to_p (void);
308
309 /* Return true if NODE can be discarded by linker from the binary. */
310 inline bool
311 can_be_discarded_p (void)
312 {
313 return (DECL_EXTERNAL (decl)
314 || (get_comdat_group ()
315 && resolution != LDPR_PREVAILING_DEF
316 && resolution != LDPR_PREVAILING_DEF_IRONLY
317 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
318 }
319
320 /* Return true if NODE is local to a particular COMDAT group, and must not
321 be named from outside the COMDAT. This is used for C++ decloned
322 constructors. */
323 inline bool comdat_local_p (void)
324 {
325 return (same_comdat_group && !TREE_PUBLIC (decl));
326 }
327
328 /* Return true if ONE and TWO are part of the same COMDAT group. */
329 inline bool in_same_comdat_group_p (symtab_node *target);
330
331 /* Return true when there is a reference to node and it is not vtable. */
332 bool address_taken_from_non_vtable_p (void);
333
334 /* Return true if symbol is known to be nonzero. */
335 bool nonzero_address ();
336
337 /* Return symbol table node associated with DECL, if any,
338 and NULL otherwise. */
339 static inline symtab_node *get (const_tree decl)
340 {
341 #ifdef ENABLE_CHECKING
342 /* Check that we are called for sane type of object - functions
343 and static or external variables. */
344 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
345 || (TREE_CODE (decl) == VAR_DECL
346 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
347 || in_lto_p)));
348 /* Check that the mapping is sane - perhaps this check can go away,
349      but at the moment frontends tend to corrupt the mapping by calling
350 memcpy/memset on the tree nodes. */
351 gcc_checking_assert (!decl->decl_with_vis.symtab_node
352 || decl->decl_with_vis.symtab_node->decl == decl);
353 #endif
354 return decl->decl_with_vis.symtab_node;
355 }
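
  /* Usage sketch (editorial addition, not from this header): mapping a DECL
     to its symbol table node, if one exists.  DECL here is a hypothetical
     FUNCTION_DECL or VAR_DECL tree.

       symtab_node *snode = symtab_node::get (decl);
       if (snode && snode->definition)
         snode->dump (stderr);
  */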
356
357 /* Try to find a symtab node for declaration DECL and if it does not
358 exist or if it corresponds to an inline clone, create a new one. */
359 static inline symtab_node * get_create (tree node);
360
361   /* Return the symbol table node that has ASMNAME for its DECL_ASSEMBLER_NAME.
362 Return NULL if there's no such node. */
363 static symtab_node *get_for_asmname (const_tree asmname);
364
365 /* Dump symbol table to F. */
366 static void dump_table (FILE *);
367
368 /* Dump symbol table to stderr. */
369 static inline DEBUG_FUNCTION void debug_symtab (void)
370 {
371 dump_table (stderr);
372 }
373
374 /* Verify symbol table for internal consistency. */
375 static DEBUG_FUNCTION void verify_symtab_nodes (void);
376
377 /* Return true when NODE is known to be used from other (non-LTO)
378 object file. Known only when doing LTO via linker plugin. */
379 static bool used_from_object_file_p_worker (symtab_node *node);
380
381 /* Type of the symbol. */
382 ENUM_BITFIELD (symtab_type) type : 8;
383
384 /* The symbols resolution. */
385 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
386
387 /*** Flags representing the symbol type. ***/
388
389 /* True when symbol corresponds to a definition in current unit.
390      Set via finalize_function or finalize_decl. */
391 unsigned definition : 1;
392 /* True when symbol is an alias.
393      Set by assemble_alias. */
394 unsigned alias : 1;
395 /* True when alias is a weakref. */
396 unsigned weakref : 1;
397   /* The C++ frontend produces same body aliases and extra name aliases for
398      virtual functions and vtables that are obviously equivalent.
399      Those aliases are a bit special, especially because the C++ frontend
400      visibility code is so ugly it cannot get them right the first time
401      and their visibility needs to be copied from their "masters" at
402      the end of parsing. */
403 unsigned cpp_implicit_alias : 1;
404 /* Set once the definition was analyzed. The list of references and
405 other properties are built during analysis. */
406 unsigned analyzed : 1;
407 /* Set for write-only variables. */
408 unsigned writeonly : 1;
409 /* Visibility of symbol was used for further optimization; do not
410 permit further changes. */
411 unsigned refuse_visibility_changes : 1;
412
413 /*** Visibility and linkage flags. ***/
414
415 /* Set when function is visible by other units. */
416 unsigned externally_visible : 1;
417 /* Don't reorder to other symbols having this set. */
418 unsigned no_reorder : 1;
419 /* The symbol will be assumed to be used in an invisible way (like
420      by a toplevel asm statement). */
421 unsigned force_output : 1;
422   /* Like FORCE_OUTPUT, but used in the case the ABI requires the symbol to be
423      exported. Unlike FORCE_OUTPUT this flag gets cleared for symbols promoted
424      to static and it does not inhibit optimization. */
425 unsigned forced_by_abi : 1;
426 /* True when the name is known to be unique and thus it does not need mangling. */
427 unsigned unique_name : 1;
428   /* Specifies whether the section was set by the user or by the
429      compiler via -ffunction-sections. */
430 unsigned implicit_section : 1;
431 /* True when body and other characteristics have been removed by
432 symtab_remove_unreachable_nodes. */
433 unsigned body_removed : 1;
434
435 /*** WHOPR Partitioning flags.
436 These flags are used at ltrans stage when only part of the callgraph is
437 available. ***/
438
439 /* Set when variable is used from other LTRANS partition. */
440 unsigned used_from_other_partition : 1;
441 /* Set when function is available in the other LTRANS partition.
442 During WPA output it is used to mark nodes that are present in
443 multiple partitions. */
444 unsigned in_other_partition : 1;
445
446
447
448 /*** other flags. ***/
449
450 /* Set when symbol has address taken. */
451 unsigned address_taken : 1;
452 /* Set when init priority is set. */
453 unsigned in_init_priority_hash : 1;
454
455
456 /* Ordering of all symtab entries. */
457 int order;
458
459 /* Declaration representing the symbol. */
460 tree decl;
461
462 /* Linked list of symbol table entries starting with symtab_nodes. */
463 symtab_node *next;
464 symtab_node *previous;
465
466 /* Linked list of symbols with the same asm name. There may be multiple
467 entries for single symbol name during LTO, because symbols are renamed
468 only after partitioning.
469
470      Because inline clones are kept in the assembler name hash, they also produce
471 duplicate entries.
472
473 There are also several long standing bugs where frontends and builtin
474 code produce duplicated decls. */
475 symtab_node *next_sharing_asm_name;
476 symtab_node *previous_sharing_asm_name;
477
478 /* Circular list of nodes in the same comdat group if non-NULL. */
479 symtab_node *same_comdat_group;
480
481 /* Vectors of referring and referenced entities. */
482 ipa_ref_list ref_list;
483
484   /* Alias target. May be either a DECL pointer or an ASSEMBLER_NAME pointer
485      depending on what was known to the frontend at creation time.
486      Once the alias is resolved, this pointer becomes NULL. */
487 tree alias_target;
488
489 /* File stream where this node is being written to. */
490 struct lto_file_decl_data * lto_file_data;
491
492 PTR GTY ((skip)) aux;
493
494 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
495 tree x_comdat_group;
496
497 /* Section name. Again can be private, if allowed. */
498 section_hash_entry *x_section;
499
500 protected:
501 /* Dump base fields of symtab nodes to F. Not to be used directly. */
502 void dump_base (FILE *);
503
504 /* Verify common part of symtab node. */
505 bool DEBUG_FUNCTION verify_base (void);
506
507 /* Remove node from symbol table. This function is not used directly, but via
508 cgraph/varpool node removal routines. */
509 void unregister (void);
510
511 /* Return the initialization and finalization priority information for
512 DECL. If there is no previous priority information, a freshly
513 allocated structure is returned. */
514 struct symbol_priority_map *priority_info (void);
515
516 private:
517 /* Worker for set_section. */
518 static bool set_section (symtab_node *n, void *s);
519
520 /* Worker for symtab_resolve_alias. */
521 static bool set_implicit_section (symtab_node *n, void *);
522
523 /* Worker searching noninterposable alias. */
524 static bool noninterposable_alias (symtab_node *node, void *data);
525 };
526
527 /* Walk all aliases for NODE. */
528 #define FOR_EACH_ALIAS(node, alias) \
529 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
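
/* Usage sketch (editorial addition, not from this header): walking the direct
   aliases of a symbol.  NODE is a hypothetical symtab_node pointer; ALIAS
   iterates over the ipa_ref entries whose referring symbol is the alias.

     ipa_ref *alias_ref;
     FOR_EACH_ALIAS (node, alias_ref)
       alias_ref->referring->dump (stderr);
*/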
530
531 /* This is the information that is put into the cgraph local structure
532 to recover a function. */
533 struct lto_file_decl_data;
534
535 extern const char * const cgraph_availability_names[];
536 extern const char * const ld_plugin_symbol_resolution_names[];
537 extern const char * const tls_model_names[];
538
539 /* Information about thunk, used only for same body aliases. */
540
541 struct GTY(()) cgraph_thunk_info {
542 /* Information about the thunk. */
543 HOST_WIDE_INT fixed_offset;
544 HOST_WIDE_INT virtual_value;
545 tree alias;
546 bool this_adjusting;
547 bool virtual_offset_p;
548 /* Set to true when alias node is thunk. */
549 bool thunk_p;
550 };
551
552 /* Information about the function collected locally.
553 Available after function is analyzed. */
554
555 struct GTY(()) cgraph_local_info {
556   /* Set when the function is visible in the current compilation unit only
557 and its address is never taken. */
558 unsigned local : 1;
559
560   /* False when there is something that makes versioning impossible. */
561 unsigned versionable : 1;
562
563   /* False when the function calling convention and signature cannot be changed.
564 This is the case when __builtin_apply_args is used. */
565 unsigned can_change_signature : 1;
566
567   /* True when the function was originally extern inline, but it is
568 redefined now. */
569 unsigned redefined_extern_inline : 1;
570
571 /* True if the function may enter serial irrevocable mode. */
572 unsigned tm_may_enter_irr : 1;
573 };
574
575 /* Information about the function that needs to be computed globally
576 once compilation is finished. Available only with -funit-at-a-time. */
577
578 struct GTY(()) cgraph_global_info {
579 /* For inline clones this points to the function they will be
580 inlined into. */
581 cgraph_node *inlined_to;
582 };
583
584 /* Information about the function that is propagated by the RTL backend.
585    Available only for functions that have already been assembled. */
586
587 struct GTY(()) cgraph_rtl_info {
588 unsigned int preferred_incoming_stack_boundary;
589
590 /* Call unsaved hard registers really used by the corresponding
591 function (including ones used by functions called by the
592 function). */
593 HARD_REG_SET function_used_regs;
594 /* Set if function_used_regs is valid. */
595 unsigned function_used_regs_valid: 1;
596 };
597
598 /* Represent which DECL tree (or reference to such tree)
599 will be replaced by another tree while versioning. */
600 struct GTY(()) ipa_replace_map
601 {
602 /* The tree that will be replaced. */
603 tree old_tree;
604 /* The new (replacing) tree. */
605 tree new_tree;
606 /* Parameter number to replace, when old_tree is NULL. */
607 int parm_num;
608 /* True when a substitution should be done, false otherwise. */
609 bool replace_p;
610 /* True when we replace a reference to old_tree. */
611 bool ref_p;
612 };
613
614 struct GTY(()) cgraph_clone_info
615 {
616 vec<ipa_replace_map *, va_gc> *tree_map;
617 bitmap args_to_skip;
618 bitmap combined_args_to_skip;
619 };
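
/* Illustrative sketch (editorial addition, not from this header): building a
   replacement map that substitutes NEW_CST for parameter 0, e.g. for use with
   create_virtual_clone declared further below.  The allocation call and the
   variables NEW_CST and TREE_MAP are assumptions, not code from this file.

     ipa_replace_map *map = ggc_cleared_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;   // replace by parameter number instead
     map->parm_num = 0;
     map->new_tree = new_cst;
     map->replace_p = true;
     map->ref_p = false;

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     vec_safe_push (tree_map, map);
*/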
620
621 enum cgraph_simd_clone_arg_type
622 {
623 SIMD_CLONE_ARG_TYPE_VECTOR,
624 SIMD_CLONE_ARG_TYPE_UNIFORM,
625 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
626 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
627 SIMD_CLONE_ARG_TYPE_MASK
628 };
629
630 /* Function arguments in the original function of a SIMD clone.
631 Supplementary data for `struct simd_clone'. */
632
633 struct GTY(()) cgraph_simd_clone_arg {
634 /* Original function argument as it originally existed in
635 DECL_ARGUMENTS. */
636 tree orig_arg;
637
638   /* orig_arg's type (or, for extern functions, the type from
639      TYPE_ARG_TYPES). */
640 tree orig_type;
641
642 /* If argument is a vector, this holds the vector version of
643 orig_arg that after adjusting the argument types will live in
644 DECL_ARGUMENTS. Otherwise, this is NULL.
645
646 This basically holds:
647 vector(simdlen) __typeof__(orig_arg) new_arg. */
648 tree vector_arg;
649
650   /* vector_arg's type (or, for extern functions, the new vector type). */
651 tree vector_type;
652
653 /* If argument is a vector, this holds the array where the simd
654 argument is held while executing the simd clone function. This
655 is a local variable in the cloned function. Its content is
656 copied from vector_arg upon entry to the clone.
657
658 This basically holds:
659 __typeof__(orig_arg) simd_array[simdlen]. */
660 tree simd_array;
661
662 /* A SIMD clone's argument can be either linear (constant or
663 variable), uniform, or vector. */
664 enum cgraph_simd_clone_arg_type arg_type;
665
666 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP this is
667 the constant linear step, if arg_type is
668 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP, this is index of
669 the uniform argument holding the step, otherwise 0. */
670 HOST_WIDE_INT linear_step;
671
672 /* Variable alignment if available, otherwise 0. */
673 unsigned int alignment;
674 };
675
676 /* Specific data for a SIMD function clone. */
677
678 struct GTY(()) cgraph_simd_clone {
679 /* Number of words in the SIMD lane associated with this clone. */
680 unsigned int simdlen;
681
682 /* Number of annotated function arguments in `args'. This is
683 usually the number of named arguments in FNDECL. */
684 unsigned int nargs;
685
686 /* Max hardware vector size in bits for integral vectors. */
687 unsigned int vecsize_int;
688
689 /* Max hardware vector size in bits for floating point vectors. */
690 unsigned int vecsize_float;
691
692   /* The mangling character for a given vector size. This is used
693 to determine the ISA mangling bit as specified in the Intel
694 Vector ABI. */
695 unsigned char vecsize_mangle;
696
697 /* True if this is the masked, in-branch version of the clone,
698 otherwise false. */
699 unsigned int inbranch : 1;
700
701 /* True if this is a Cilk Plus variant. */
702 unsigned int cilk_elemental : 1;
703
704 /* Doubly linked list of SIMD clones. */
705 cgraph_node *prev_clone, *next_clone;
706
707 /* Original cgraph node the SIMD clones were created for. */
708 cgraph_node *origin;
709
710 /* Annotated function arguments for the original function. */
711 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
712 };
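
/* Usage sketch (editorial addition, not from this header): inspecting the
   annotated arguments of a SIMD clone.  SC is a hypothetical pointer to a
   cgraph_simd_clone.

     for (unsigned i = 0; i < sc->nargs; i++)
       if (sc->args[i].arg_type == SIMD_CLONE_ARG_TYPE_UNIFORM)
         fprintf (stderr, "argument %u is uniform\n", i);
*/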
713
714 /* Function Multiversioning info. */
715 struct GTY((for_user)) cgraph_function_version_info {
716 /* The cgraph_node for which the function version info is stored. */
717 cgraph_node *this_node;
718 /* Chains all the semantically identical function versions. The
719 first function in this chain is the version_info node of the
720 default function. */
721 cgraph_function_version_info *prev;
722 /* If this version node corresponds to a dispatcher for function
723 versions, this points to the version info node of the default
724 function, the first node in the chain. */
725 cgraph_function_version_info *next;
726 /* If this node corresponds to a function version, this points
727 to the dispatcher function decl, which is the function that must
728 be called to execute the right function version at run-time.
729
730 If this cgraph node is a dispatcher (if dispatcher_function is
731 true, in the cgraph_node struct) for function versions, this
732 points to resolver function, which holds the function body of the
733 dispatcher. The dispatcher decl is an alias to the resolver
734 function decl. */
735 tree dispatcher_resolver;
736 };
737
738 #define DEFCIFCODE(code, type, string) CIF_ ## code,
739 /* Reasons for inlining failures. */
740
741 enum cgraph_inline_failed_t {
742 #include "cif-code.def"
743 CIF_N_REASONS
744 };
745
746 enum cgraph_inline_failed_type_t
747 {
748 CIF_FINAL_NORMAL = 0,
749 CIF_FINAL_ERROR
750 };
751
752 struct cgraph_edge;
753
754 struct cgraph_edge_hasher : ggc_hasher<cgraph_edge *>
755 {
756 typedef gimple compare_type;
757
758 static hashval_t hash (cgraph_edge *);
759 static bool equal (cgraph_edge *, gimple);
760 };
761
762 /* The cgraph data structure.
763    Each function decl has an assigned cgraph_node listing its callees and callers. */
764
765 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
766 public:
767 /* Remove the node from cgraph and all inline clones inlined into it.
768      However, skip removal of FORBIDDEN_NODE and return true if it needs to be
769      removed. This allows calling the function from an outer loop walking the
770      clone tree. */
771 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
772
773 /* Record all references from cgraph_node that are taken
774 in statement STMT. */
775 void record_stmt_references (gimple stmt);
776
777 /* Like cgraph_set_call_stmt but walk the clone tree and update all
778 clones sharing the same function body.
779      When UPDATE_SPECULATIVE is true, all three components of a
780      speculative edge get updated. Otherwise we update only the direct
781      call. */
782 void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
783 bool update_speculative = true);
784
785 /* Walk the alias chain to return the function cgraph_node is alias of.
786 Walk through thunk, too.
787 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
788 cgraph_node *function_symbol (enum availability *avail = NULL);
789
790 /* Create node representing clone of N executed COUNT times. Decrease
791 the execution counts from original node too.
792 The new clone will have decl set to DECL that may or may not be the same
793 as decl of N.
794
795 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
796 function's profile to reflect the fact that part of execution is handled
797 by node.
798      When CALL_DUPLICATION_HOOK is true, the ipa passes are notified about
799      the new clone. Otherwise the caller is responsible for doing so later.
800
801 If the new node is being inlined into another one, NEW_INLINED_TO should be
802 the outline function the new one is (even indirectly) inlined to.
803 All hooks will see this in node's global.inlined_to, when invoked.
804 Can be NULL if the node is not inlined. */
805 cgraph_node *create_clone (tree decl, gcov_type count, int freq,
806 bool update_original,
807 vec<cgraph_edge *> redirect_callers,
808 bool call_duplication_hook,
809 cgraph_node *new_inlined_to,
810 bitmap args_to_skip);
811
812 /* Create callgraph node clone with new declaration. The actual body will
813 be copied later at compilation stage. */
814 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
815 vec<ipa_replace_map *, va_gc> *tree_map,
816 bitmap args_to_skip, const char * suffix);
817
818 /* cgraph node being removed from symbol table; see if its entry can be
819      replaced by another inline clone. */
820 cgraph_node *find_replacement (void);
821
822 /* Create a new cgraph node which is the new version of
823 callgraph node. REDIRECT_CALLERS holds the callers
824 edges which should be redirected to point to
825 NEW_VERSION. ALL the callees edges of the node
826 are cloned to the new version node. Return the new
827 version node.
828
829      If non-NULL, BBS_TO_COPY determines which basic blocks
830      are copied, to prevent duplicating calls that are dead
831      in the clone. */
832
833 cgraph_node *create_version_clone (tree new_decl,
834 vec<cgraph_edge *> redirect_callers,
835 bitmap bbs_to_copy);
836
837 /* Perform function versioning.
838 Function versioning includes copying of the tree and
839 a callgraph update (creating a new cgraph node and updating
840 its callees and callers).
841
842 REDIRECT_CALLERS varray includes the edges to be redirected
843 to the new version.
844
845 TREE_MAP is a mapping of tree nodes we want to replace with
846 new ones (according to results of prior analysis).
847
848      If non-NULL, ARGS_TO_SKIP determines the function parameters to remove
849      from the new version.
850      If SKIP_RETURN is true, the new version will return void.
851      If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
852      If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
853
854 Return the new version's cgraph node. */
855 cgraph_node *create_version_clone_with_body
856 (vec<cgraph_edge *> redirect_callers,
857 vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
858 bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
859 const char *clone_name);
860
861 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
862 corresponding to cgraph_node. */
863 cgraph_function_version_info *insert_new_function_version (void);
864
865 /* Get the cgraph_function_version_info node corresponding to node. */
866 cgraph_function_version_info *function_version (void);
867
868 /* Discover all functions and variables that are trivially needed, analyze
869      them as well as all functions and variables referred to by them. */
870 void analyze (void);
871
872   /* Add a thunk alias into the callgraph. The alias declaration is ALIAS and it
873      aliases DECL with adjustments made to the first parameter.
874      See comments in thunk_adjust for details on the parameters. */
875 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
876 HOST_WIDE_INT fixed_offset,
877 HOST_WIDE_INT virtual_value,
878 tree virtual_offset,
879 tree real_alias);
880
881
882 /* Return node that alias is aliasing. */
883 inline cgraph_node *get_alias_target (void);
884
885   /* Given a function symbol, walk the alias chain to return the function the
886      node is an alias of. Do not walk through thunks.
887 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
888
889 cgraph_node *ultimate_alias_target (availability *availability = NULL);
890
891 /* Expand thunk NODE to gimple if possible.
892      When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
893      no assembler is produced.
894      When OUTPUT_ASM_THUNKS is true, also produce assembler for
895 thunks that are not lowered. */
896 bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
897
898 /* Assemble thunks and aliases associated to node. */
899 void assemble_thunks_and_aliases (void);
900
901 /* Expand function specified by node. */
902 void expand (void);
903
904   /* As a GCC extension we allow redefinition of the function. The
905 semantics when both copies of bodies differ is not well defined.
906 We replace the old body with new body so in unit at a time mode
907 we always use new body, while in normal mode we may end up with
908 old body inlined into some functions and new body expanded and
909 inlined in others. */
910 void reset (void);
911
912 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
913 kind of wrapper method. */
914 void create_wrapper (cgraph_node *target);
915
916   /* Verify consistency of the cgraph node. */
917 void DEBUG_FUNCTION verify_node (void);
918
919 /* Remove function from symbol table. */
920 void remove (void);
921
922 /* Dump call graph node to file F. */
923 void dump (FILE *f);
924
925 /* Dump call graph node to stderr. */
926 void DEBUG_FUNCTION debug (void);
927
928 /* When doing LTO, read cgraph_node's body from disk if it is not already
929 present. */
930 bool get_body (void);
931
932 /* Release memory used to represent body of function.
933 Use this only for functions that are released before being translated to
934 target code (i.e. RTL). Functions that are compiled to RTL and beyond
935 are free'd in final.c via free_after_compilation(). */
936 void release_body (bool keep_arguments = false);
937
938 /* Return the DECL_STRUCT_FUNCTION of the function. */
939 struct function *get_fun (void);
940
941   /* The cgraph_node is no longer a nested function; update the cgraph accordingly. */
942 void unnest (void);
943
944 /* Bring cgraph node local. */
945 void make_local (void);
946
947   /* Likewise indicate that a node has its address taken. */
948 void mark_address_taken (void);
949
950   /* Set finalization priority to PRIORITY. */
951 void set_fini_priority (priority_type priority);
952
953 /* Return the finalization priority. */
954 priority_type get_fini_priority (void);
955
956 /* Create edge from a given function to CALLEE in the cgraph. */
957 cgraph_edge *create_edge (cgraph_node *callee,
958 gimple call_stmt, gcov_type count,
959 int freq);
960
961 /* Create an indirect edge with a yet-undetermined callee where the call
962 statement destination is a formal parameter of the caller with index
963 PARAM_INDEX. */
964 cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
965 gcov_type count, int freq,
966 bool compute_indirect_info = true);
967
968   /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
969      the same function body. If clones already have an edge for OLD_STMT, only
970      update the edge the same way as cgraph_set_call_stmt_including_clones does. */
971 void create_edge_including_clones (cgraph_node *callee,
972 gimple old_stmt, gimple stmt,
973 gcov_type count,
974 int freq,
975 cgraph_inline_failed_t reason);
976
977 /* Return the callgraph edge representing the GIMPLE_CALL statement
978 CALL_STMT. */
979 cgraph_edge *get_edge (gimple call_stmt);
980
981 /* Collect all callers of cgraph_node and its aliases that are known to lead
982 to NODE (i.e. are not overwritable). */
983 vec<cgraph_edge *> collect_callers (void);
984
985 /* Remove all callers from the node. */
986 void remove_callers (void);
987
988 /* Remove all callees from the node. */
989 void remove_callees (void);
990
991 /* Return function availability. See cgraph.h for description of individual
992 return values. */
993 enum availability get_availability (void);
994
995 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
996 if any to NOTHROW. */
997 void set_nothrow_flag (bool nothrow);
998
999 /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
1000 if any to READONLY. */
1001 void set_const_flag (bool readonly, bool looping);
1002
1003 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1004 if any to PURE. */
1005 void set_pure_flag (bool pure, bool looping);
1006
1007   /* Call callback on function and aliases associated with the function.
1008 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1009 skipped. */
1010
1011 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1012 void *),
1013 void *data, bool include_overwritable);
1014
1015   /* Call callback on cgraph_node, thunks and aliases associated with NODE.
1016 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1017 skipped. */
1018 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1019 void *data),
1020 void *data,
1021 bool include_overwritable);
1022
1023 /* Likewise indicate that a node is needed, i.e. reachable via some
1024 external means. */
1025 inline void mark_force_output (void);
1026
1027 /* Return true when function can be marked local. */
1028 bool local_p (void);
1029
1030 /* Return true if cgraph_node can be made local for API change.
1031 Extern inline functions and C++ COMDAT functions can be made local
1032 at the expense of possible code size growth if function is used in multiple
1033 compilation units. */
1034 bool can_be_local_p (void);
1035
1036 /* Return true when cgraph_node can not return or throw and thus
1037 it is safe to ignore its side effects for IPA analysis. */
1038 bool cannot_return_p (void);
1039
1040 /* Return true when function cgraph_node and all its aliases are only called
1041 directly.
1042 i.e. it is not externally visible, address was not taken and
1043 it is not used in any other non-standard way. */
1044 bool only_called_directly_p (void);
1045
1046 /* Return true when function is only called directly or it has alias.
1047 i.e. it is not externally visible, address was not taken and
1048 it is not used in any other non-standard way. */
1049 inline bool only_called_directly_or_aliased_p (void);
1050
1051 /* Return true when function cgraph_node can be expected to be removed
1052 from program when direct calls in this compilation unit are removed.
1053
1054 As a special case COMDAT functions are
1055      cgraph_can_remove_if_no_direct_calls_p while they are not
1056      cgraph_only_called_directly_p (it is possible they are called from another
1057      unit).
1058
1059 This function behaves as cgraph_only_called_directly_p because eliminating
1060 all uses of COMDAT function does not make it necessarily disappear from
1061 the program unless we are compiling whole program or we do LTO. In this
1062 case we know we win since dynamic linking will not really discard the
1063 linkonce section. */
1064 bool will_be_removed_from_program_if_no_direct_calls_p (void);
1065
1066 /* Return true when function can be removed from callgraph
1067 if all direct calls are eliminated. */
1068 bool can_remove_if_no_direct_calls_and_refs_p (void);
1069
1070 /* Return true when function cgraph_node and its aliases can be removed from
1071 callgraph if all direct calls are eliminated. */
1072 bool can_remove_if_no_direct_calls_p (void);
1073
1074 /* Return true when callgraph node is a function with Gimple body defined
1075      in current unit. Functions can also be defined externally or they
1076 can be thunks with no Gimple representation.
1077
1078 Note that at WPA stage, the function body may not be present in memory. */
1079 inline bool has_gimple_body_p (void);
1080
1081 /* Return true if function should be optimized for size. */
1082 bool optimize_for_size_p (void);
1083
1084 /* Dump the callgraph to file F. */
1085 static void dump_cgraph (FILE *f);
1086
1087 /* Dump the call graph to stderr. */
1088 static inline
1089 void debug_cgraph (void)
1090 {
1091 dump_cgraph (stderr);
1092 }
1093
1094 /* Record that DECL1 and DECL2 are semantically identical function
1095 versions. */
1096 static void record_function_versions (tree decl1, tree decl2);
1097
1098 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1099 DECL is a duplicate declaration. */
1100 static void delete_function_version (tree decl);
1101
1102 /* Add the function FNDECL to the call graph.
1103 Unlike finalize_function, this function is intended to be used
1104 by middle end and allows insertion of new function at arbitrary point
1105 of compilation. The function can be either in high, low or SSA form
1106 GIMPLE.
1107
1108 The function is assumed to be reachable and have address taken (so no
1109 API breaking optimizations are performed on it).
1110
1111      The main work done by this function is to enqueue the function for later
1112      processing to avoid the need for the passes to be re-entrant. */
1113 static void add_new_function (tree fndecl, bool lowered);
1114
1115 /* Return callgraph node for given symbol and check it is a function. */
1116 static inline cgraph_node *get (const_tree decl)
1117 {
1118 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1119 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1120 }
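
  /* Usage sketch (editorial addition, not from this header): looking up the
     call-graph node for a function declaration and checking whether a gimple
     body is available in the current unit.  FNDECL is a hypothetical
     FUNCTION_DECL.

       cgraph_node *cnode = cgraph_node::get (fndecl);
       if (cnode && cnode->has_gimple_body_p ())
         cnode->dump (stderr);
  */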
1121
1122 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1123 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1124 have the garbage collector run at the moment. We would need to either
1125 create a new GC context, or just not compile right now. */
1126 static void finalize_function (tree, bool);
1127
1128 /* Return cgraph node assigned to DECL. Create new one when needed. */
1129 static cgraph_node * create (tree decl);
1130
1131 /* Try to find a call graph node for declaration DECL and if it does not
1132 exist or if it corresponds to an inline clone, create a new one. */
1133 static cgraph_node * get_create (tree);
1134
1135 /* Return local info for the compiled function. */
1136 static cgraph_local_info *local_info (tree decl);
1137
1138 /* Return global info for the compiled function. */
1139 static cgraph_global_info *global_info (tree);
1140
1141   /* Return RTL info for the compiled function. */
1142 static cgraph_rtl_info *rtl_info (tree);
1143
1144 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1145 Return NULL if there's no such node. */
1146 static cgraph_node *get_for_asmname (tree asmname);
1147
1148 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1149 successful and NULL otherwise.
1150 Same body aliases are output whenever the body of DECL is output,
1151 and cgraph_node::get (ALIAS) transparently
1152 returns cgraph_node::get (DECL). */
1153 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1154
1155   /* Worker for used_from_object_file_p. */
1156 static bool used_from_object_file_p_worker (cgraph_node *node, void *)
1157 {
1158 return node->used_from_object_file_p ();
1159 }
1160
1161 /* Return true when cgraph_node can not be local.
1162 Worker for cgraph_local_node_p. */
1163 static bool non_local_p (cgraph_node *node, void *);
1164
1165 /* Verify whole cgraph structure. */
1166 static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1167
1168 /* Worker to bring NODE local. */
1169 static bool make_local (cgraph_node *node, void *);
1170
1171   /* Mark ALIAS as an alias to TARGET and return the cgraph node created for
1172      the alias (not necessarily cgraph_node (TARGET)). */
1174 static cgraph_node *create_alias (tree alias, tree target);
1175
1176 cgraph_edge *callees;
1177 cgraph_edge *callers;
1178 /* List of edges representing indirect calls with a yet undetermined
1179 callee. */
1180 cgraph_edge *indirect_calls;
1181 /* For nested functions points to function the node is nested in. */
1182 cgraph_node *origin;
1183 /* Points to first nested function, if any. */
1184 cgraph_node *nested;
1185 /* Pointer to the next function with same origin, if any. */
1186 cgraph_node *next_nested;
1187 /* Pointer to the next clone. */
1188 cgraph_node *next_sibling_clone;
1189 cgraph_node *prev_sibling_clone;
1190 cgraph_node *clones;
1191 cgraph_node *clone_of;
1192   /* For functions with many call sites it holds a map from call expression
1193 to the edge to speed up cgraph_edge function. */
1194 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1195 /* Declaration node used to be clone of. */
1196 tree former_clone_of;
1197
1198 /* If this is a SIMD clone, this points to the SIMD specific
1199 information for it. */
1200 cgraph_simd_clone *simdclone;
1201 /* If this function has SIMD clones, this points to the first clone. */
1202 cgraph_node *simd_clones;
1203
1204 /* Interprocedural passes scheduled to have their transform functions
1205 applied next time we execute local pass on them. We maintain it
1206 per-function in order to allow IPA passes to introduce new functions. */
1207 vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1208
1209 cgraph_local_info local;
1210 cgraph_global_info global;
1211 cgraph_rtl_info rtl;
1212 cgraph_clone_info clone;
1213 cgraph_thunk_info thunk;
1214
1215 /* Expected number of executions: calculated in profile.c. */
1216 gcov_type count;
1217 /* How to scale counts at materialization time; used to merge
1218 LTO units with different number of profile runs. */
1219 int count_materialization_scale;
1220 /* Unique id of the node. */
1221 int uid;
1222 /* ID assigned by the profiling. */
1223 unsigned int profile_id;
1224 /* Time profiler: first run of function. */
1225 int tp_first_run;
1226
1227 /* Set when decl is an abstract function pointed to by the
1228 ABSTRACT_DECL_ORIGIN of a reachable function. */
1229 unsigned used_as_abstract_origin : 1;
1230 /* Set once the function is lowered (i.e. its CFG is built). */
1231 unsigned lowered : 1;
1232 /* Set once the function has been instantiated and its callee
1233 lists created. */
1234 unsigned process : 1;
1235 /* How commonly executed the node is. Initialized during branch
1236 probabilities pass. */
1237 ENUM_BITFIELD (node_frequency) frequency : 2;
1238 /* True when function can only be called at startup (from static ctor). */
1239 unsigned only_called_at_startup : 1;
1240   /* True when function can only be called at exit (from static dtor). */
1241 unsigned only_called_at_exit : 1;
1242 /* True when function is the transactional clone of a function which
1243 is called only from inside transactions. */
1244 /* ?? We should be able to remove this. We have enough bits in
1245 cgraph to calculate it. */
1246 unsigned tm_clone : 1;
1247 /* True if this decl is a dispatcher for function versions. */
1248 unsigned dispatcher_function : 1;
1249 /* True if this decl calls a COMDAT-local function. This is set up in
1250 compute_inline_parameters and inline_call. */
1251 unsigned calls_comdat_local : 1;
1252 };
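
/* Usage sketch (editorial addition, not from this header): iterating the
   outgoing call edges of a node via the callees/next_callee and
   indirect_calls links declared above.  NODE is a hypothetical cgraph_node
   pointer.

     for (cgraph_edge *e = node->callees; e; e = e->next_callee)
       fprintf (stderr, "  calls %s\n", e->callee->name ());
     for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
       fprintf (stderr, "  indirect call through param %i\n",
                e->indirect_info->param_index);
*/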
1253
1254 /* A cgraph node set is a collection of cgraph nodes. A cgraph node
1255 can appear in multiple sets. */
1256 struct cgraph_node_set_def
1257 {
1258 hash_map<cgraph_node *, size_t> *map;
1259 vec<cgraph_node *> nodes;
1260 };
1261
1262 typedef cgraph_node_set_def *cgraph_node_set;
1263 typedef struct varpool_node_set_def *varpool_node_set;
1264
1265 class varpool_node;
1266
1267 /* A varpool node set is a collection of varpool nodes. A varpool node
1268 can appear in multiple sets. */
1269 struct varpool_node_set_def
1270 {
1271 hash_map<varpool_node *, size_t> * map;
1272 vec<varpool_node *> nodes;
1273 };
1274
1275 /* Iterator structure for cgraph node sets. */
1276 struct cgraph_node_set_iterator
1277 {
1278 cgraph_node_set set;
1279 unsigned index;
1280 };
1281
1282 /* Iterator structure for varpool node sets. */
1283 struct varpool_node_set_iterator
1284 {
1285 varpool_node_set set;
1286 unsigned index;
1287 };
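
/* Usage sketch (editorial addition, not from this header): the node sets
   above are just a map plus a vector, so their members can also be walked
   directly with the vec.h iteration macro.  SET is a hypothetical
   cgraph_node_set.

     cgraph_node *n;
     unsigned i;
     FOR_EACH_VEC_ELT (set->nodes, i, n)
       n->dump (stderr);
*/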
1288
1289 /* Context of a polymorphic call. It represents information about the type of
1290 instance that may reach the call. This is used by ipa-devirt walkers of the
1291 type inheritance graph. */
1292
1293 class GTY(()) ipa_polymorphic_call_context {
1294 public:
1295 /* The called object appears in an object of type OUTER_TYPE
1296 at offset OFFSET. When information is not 100% reliable, we
1297 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1298 HOST_WIDE_INT offset;
1299 HOST_WIDE_INT speculative_offset;
1300 tree outer_type;
1301 tree speculative_outer_type;
1302 /* True if outer object may be in construction or destruction. */
1303 unsigned maybe_in_construction : 1;
1304 /* True if outer object may be of derived type. */
1305 unsigned maybe_derived_type : 1;
1306 /* True if speculative outer object may be of derived type. We always
1307 speculate that construction does not happen. */
1308 unsigned speculative_maybe_derived_type : 1;
1309 /* True if the context is invalid and all calls should be redirected
1310 to BUILTIN_UNREACHABLE. */
1311 unsigned invalid : 1;
1312 /* True if the outer type is dynamic. */
1313 unsigned dynamic : 1;
1314
1315 /* Build empty "I know nothing" context. */
1316 ipa_polymorphic_call_context ();
1317 /* Build polymorphic call context for indirect call E. */
1318 ipa_polymorphic_call_context (cgraph_edge *e);
1319 /* Build polymorphic call context for IP invariant CST.
1320      If specified, OTR_TYPE specifies the type of polymorphic call
1321      that takes CST+OFFSET as a parameter. */
1322 ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1323 HOST_WIDE_INT offset = 0);
1324 /* Build context for pointer REF contained in FNDECL at statement STMT.
1325      If INSTANCE is non-NULL, return a pointer to the object described by
1326 the context. */
1327 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple stmt,
1328 tree *instance = NULL);
1329
1330 /* Look for vtable stores or constructor calls to work out dynamic type
1331 of memory location. */
1332 bool get_dynamic_type (tree, tree, tree, gimple);
1333
1334 /* Make context non-speculative. */
1335 void clear_speculation ();
1336
1337 /* Walk container types and modify context to point to actual class
1338 containing OTR_TYPE (if non-NULL) as base class.
1339 Return true if resulting context is valid.
1340
1341 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1342      valid only via allocation of a new polymorphic type inside, by means
1343      of placement new.
1344
1345 When CONSIDER_BASES is false, only look for actual fields, not base types
1346 of TYPE. */
1347 bool restrict_to_inner_class (tree otr_type,
1348 bool consider_placement_new = true,
1349 bool consider_bases = true);
1350
1351 /* Adjust all offsets in contexts by given number of bits. */
1352 void offset_by (HOST_WIDE_INT);
1353   /* Use when we cannot track dynamic type changes. This speculatively assumes
1354      that the type change is not happening. */
1355 void possible_dynamic_type_change (bool, tree otr_type = NULL);
1356   /* Assume that both THIS and a given context are valid and strengthen THIS
1357      if possible. Return true if any strengthening was made.
1358      If the actual type the context is being used in is known, OTR_TYPE should be
1359      set accordingly. This improves the quality of the combined result. */
1360 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1361
1362 /* Return TRUE if context is fully useless. */
1363 bool useless_p () const;
1364
1365 /* Dump human readable context to F. */
1366 void dump (FILE *f) const;
1367 void DEBUG_FUNCTION debug () const;
1368
1369 /* LTO streaming. */
1370 void stream_out (struct output_block *) const;
1371 void stream_in (struct lto_input_block *, struct data_in *data_in);
1372
1373 private:
1374 bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1375 void set_by_decl (tree, HOST_WIDE_INT);
1376 bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1377 void clear_outer_type (tree otr_type = NULL);
1378 bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree);
1379 void make_speculative (tree otr_type = NULL);
1380 };
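
/* Usage sketch (editorial addition, not from this header): building a
   polymorphic call context for an indirect call edge and testing whether it
   carries any useful information.  E is a hypothetical cgraph_edge
   representing a polymorphic call.

     ipa_polymorphic_call_context ctx (e);
     if (!ctx.useless_p ())
       ctx.dump (stderr);
*/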
1381
1382 /* Structure containing additional information about an indirect call. */
1383
1384 struct GTY(()) cgraph_indirect_call_info
1385 {
1386   /* When agg_contents is set, an offset where the call pointer is located
1387 within the aggregate. */
1388 HOST_WIDE_INT offset;
1389 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1390 ipa_polymorphic_call_context context;
1391 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1392 HOST_WIDE_INT otr_token;
1393 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1394 tree otr_type;
1395 /* Index of the parameter that is called. */
1396 int param_index;
1397 /* ECF flags determined from the caller. */
1398 int ecf_flags;
1399   /* Profile_id of the common target obtained from the profile. */
1400 int common_target_id;
1401 /* Probability that call will land in function with COMMON_TARGET_ID. */
1402 int common_target_probability;
1403
1404 /* Set when the call is a virtual call with the parameter being the
1405 associated object pointer rather than a simple direct call. */
1406 unsigned polymorphic : 1;
1407 /* Set when the call is a call of a pointer loaded from contents of an
1408 aggregate at offset. */
1409 unsigned agg_contents : 1;
1410 /* Set when this is a call through a member pointer. */
1411 unsigned member_ptr : 1;
1412 /* When the previous bit is set, this one determines whether the destination
1413 is loaded from a parameter passed by reference. */
1414 unsigned by_ref : 1;
1415   /* For polymorphic calls this specifies whether the virtual table pointer
1416      may have changed between function entry and the call. */
1417 unsigned vptr_changed : 1;
1418 };
1419
1420 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1421 for_user)) cgraph_edge {
1422 friend class cgraph_node;
1423
1424 /* Remove the edge in the cgraph. */
1425 void remove (void);
1426
1427 /* Change field call_stmt of edge to NEW_STMT.
1428 If UPDATE_SPECULATIVE and E is any component of speculative
1429 edge, then update all components. */
1430 void set_call_stmt (gimple new_stmt, bool update_speculative = true);
1431
1432 /* Redirect callee of the edge to N. The function does not update underlying
1433 call expression. */
1434 void redirect_callee (cgraph_node *n);
1435
1436 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1437      CALLEE. */
1440 cgraph_edge *make_direct (cgraph_node *callee);
1441
1442   /* Turn the edge into a speculative call calling N2. Update
1443      the profile so the direct call is taken DIRECT_COUNT times
1444      with frequency DIRECT_FREQUENCY. */
1445 cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
1446 int direct_frequency);
1447
1448 /* Given speculative call edge, return all three components. */
1449 void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1450 ipa_ref *&reference);
1451
1452   /* Speculative call edge turned out to be direct call to CALLEE_DECL.
1453 Remove the speculative call sequence and return edge representing the call.
1454 It is up to caller to redirect the call as appropriate. */
1455 cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1456
1457 /* If necessary, change the function declaration in the call statement
1458 associated with the edge so that it corresponds to the edge callee. */
1459 gimple redirect_call_stmt_to_callee (void);
1460
1461 /* Create clone of edge in the node N represented
1462 by CALL_EXPR the callgraph. */
1463 cgraph_edge * clone (cgraph_node *n, gimple call_stmt, unsigned stmt_uid,
1464 gcov_type count_scale, int freq_scale, bool update_original);
1465
1466   /* Return true when a call on the edge cannot lead to a return from the caller
1467 and thus it is safe to ignore its side effects for IPA analysis
1468 when computing side effects of the caller. */
1469 bool cannot_lead_to_return_p (void);
1470
1471 /* Return true when the edge represents a direct recursion. */
1472 bool recursive_p (void);
1473
1474 /* Return true if the call can be hot. */
1475 bool maybe_hot_p (void);
1476
1477 /* Rebuild cgraph edges for current function node. This needs to be run after
1478 passes that don't update the cgraph. */
1479 static unsigned int rebuild_edges (void);
1480
1481 /* Rebuild cgraph references for current function node. This needs to be run
1482 after passes that don't update the cgraph. */
1483 static void rebuild_references (void);
1484
1485 /* Expected number of executions: calculated in profile.c. */
1486 gcov_type count;
1487 cgraph_node *caller;
1488 cgraph_node *callee;
1489 cgraph_edge *prev_caller;
1490 cgraph_edge *next_caller;
1491 cgraph_edge *prev_callee;
1492 cgraph_edge *next_callee;
1493 gimple call_stmt;
1494 /* Additional information about an indirect call. Not cleared when an edge
1495 becomes direct. */
1496 cgraph_indirect_call_info *indirect_info;
1497 PTR GTY ((skip (""))) aux;
1498 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1499 explanation why function was not inlined. */
1500 enum cgraph_inline_failed_t inline_failed;
1501 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1502 when the function is serialized in. */
1503 unsigned int lto_stmt_uid;
1504 /* Expected frequency of executions within the function.
1505 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1506 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1507 int frequency;
1508 /* Unique id of the edge. */
1509 int uid;
1510 /* Whether this edge was made direct by indirect inlining. */
1511 unsigned int indirect_inlining_edge : 1;
1512 /* Whether this edge describes an indirect call with an undetermined
1513 callee. */
1514 unsigned int indirect_unknown_callee : 1;
1516 /* True if the corresponding CALL stmt cannot be inlined. */
1517 unsigned int call_stmt_cannot_inline_p : 1;
1518 /* Can this call throw externally? */
1519 unsigned int can_throw_external : 1;
1520 /* Edges with the SPECULATIVE flag represent indirect calls that were
1521 speculatively turned into direct calls (e.g. by profile feedback).
1522 The final code sequence will have the form:
1523
1524 if (call_target == expected_fn)
1525 expected_fn ();
1526 else
1527 call_target ();
1528
1529 Every speculative call is represented by three components attached
1530 to the same call statement:
1531 1) a direct call (to expected_fn)
1532 2) an indirect call (to call_target)
1533 3) an IPA_REF_ADDR reference to expected_fn.
1534
1535 Optimizers may later redirect the direct call to a clone, so 1) and 3)
1536 do not necessarily agree on the destination (see the sketch below). */
1537 unsigned int speculative : 1;
1538 /* Set to true when caller is a constructor or destructor of polymorphic
1539 type. */
1540 unsigned in_polymorphic_cdtor : 1;
1541
1542 private:
1543 /* Remove the edge from the list of the callers of the callee. */
1544 void remove_caller (void);
1545
1546 /* Remove the edge from the list of the callees of the caller. */
1547 void remove_callee (void);
1548 };
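
/* Illustrative sketch (not part of the GCC sources): retrieving the three
   components of a speculative edge E described above, using
   speculative_call_info ().  The name () accessor is assumed to be the
   symtab_node method declared earlier in this file; E is hypothetical.

     cgraph_edge *direct_edge, *indirect_edge;
     ipa_ref *ref;
     if (e->speculative)
       {
         e->speculative_call_info (direct_edge, indirect_edge, ref);
         fprintf (stderr, "speculatively calling %s; fallback stays indirect\n",
                  direct_edge->callee->name ());
       }  */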
1549
1550 #define CGRAPH_FREQ_BASE 1000
1551 #define CGRAPH_FREQ_MAX 100000
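
/* Illustrative sketch: interpreting the fixed-point edge frequency using the
   CGRAPH_FREQ_BASE scale documented above.  A frequency of CGRAPH_FREQ_BASE
   means one expected call per invocation of the caller; E is a hypothetical
   cgraph_edge.

     double calls_per_invocation = (double) e->frequency / CGRAPH_FREQ_BASE;
     gcov_type profiled_executions = e->count;  */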
1552
1553 /* The varpool data structure.
1554 Each static variable decl has a varpool_node assigned to it. */
1555
1556 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1557 public:
1558 /* Dump given varpool node to F. */
1559 void dump (FILE *f);
1560
1561 /* Dump given varpool node to stderr. */
1562 void DEBUG_FUNCTION debug (void);
1563
1564 /* Remove variable from symbol table. */
1565 void remove (void);
1566
1567 /* Remove node initializer when it is no longer needed. */
1568 void remove_initializer (void);
1569
1570 void analyze (void);
1571
1572 /* Return variable availability. */
1573 availability get_availability (void);
1574
1575 /* When doing LTO, read variable's constructor from disk if
1576 it is not already present. */
1577 tree get_constructor (void);
1578
1579 /* Return true if variable has constructor that can be used for folding. */
1580 bool ctor_useable_for_folding_p (void);
1581
1582 /* For given variable pool node, walk the alias chain to return the node
1583 the variable is an alias of. Do not walk through thunks.
1584 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
1585 inline varpool_node *ultimate_alias_target
1586 (availability *availability = NULL);
1587
1588 /* Return node that alias is aliasing. */
1589 inline varpool_node *get_alias_target (void);
1590
1591 /* Output one variable, if necessary. Return whether we output it. */
1592 bool assemble_decl (void);
1593
1594 /* For variables in named sections make sure get_variable_section
1595 is called before we switch to those sections. Then section
1596 conflicts between read-only sections and read-only sections requiring
1597 relocations can be resolved.
1598 void finalize_named_section_flags (void);
1599
1600 /* Call CALLBACK on the varpool symbol and the aliases associated with it.
1601 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1602 skipped. */
1603 bool call_for_node_and_aliases (bool (*callback) (varpool_node *, void *),
1604 void *data,
1605 bool include_overwritable);
1606
1607 /* Return true when variable should be considered externally visible. */
1608 bool externally_visible_p (void);
1609
1610 /* Return true when all references to variable must be visible
1611 in ipa_ref_list.
1612 i.e. if the variable is not externally visible or not used in some magic
1613 way (asm statement or such).
1614 The magic uses are all summarized in force_output flag. */
1615 inline bool all_refs_explicit_p ();
1616
1617 /* Return true when variable can be removed from variable pool
1618 if all references to it are eliminated. */
1619 inline bool can_remove_if_no_refs_p (void);
1620
1621 /* Add the variable DECL to the varpool.
1622 Unlike finalize_decl, this function is intended to be used by the
1623 middle end and allows insertion of new variables at an arbitrary point
1624 of compilation. */
1625 static void add (tree decl);
1626
1627 /* Return varpool node for given symbol and check it is a variable. */
1628 static inline varpool_node *get (const_tree decl);
1629
1630 /* Mark DECL as finalized. By finalizing the declaration, the frontend
1631 instructs the middle end to output the variable to the asm file, if it is
1632 needed or externally visible. */
1633 static void finalize_decl (tree decl);
1634
1635 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1636 Extra name aliases are output whenever DECL is output. */
1637 static varpool_node * create_extra_name_alias (tree alias, tree decl);
1638
1639 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
1640 Extra name aliases are output whenever DECL is output. */
1641 static varpool_node * create_alias (tree, tree);
1642
1643 /* Dump the variable pool to F. */
1644 static void dump_varpool (FILE *f);
1645
1646 /* Dump the variable pool to stderr. */
1647 static void DEBUG_FUNCTION debug_varpool (void);
1648
1649 /* Allocate new varpool node and insert it into basic data structures. */
1650 static varpool_node *create_empty (void);
1651
1652 /* Return varpool node assigned to DECL. Create new one when needed. */
1653 static varpool_node *get_create (tree decl);
1654
1655 /* Given an assembler name, lookup node. */
1656 static varpool_node *get_for_asmname (tree asmname);
1657
1658 /* Set when variable is scheduled to be assembled. */
1659 unsigned output : 1;
1660
1661 /* Set if the variable is dynamically initialized, except for
1662 function local statics. */
1663 unsigned dynamically_initialized : 1;
1664
1665 ENUM_BITFIELD(tls_model) tls_model : 3;
1666
1667 /* Set if the variable is known to be used by a single function only.
1668 This is computed by the ipa_single_use pass and used by late optimizations
1669 in places where the optimization would be valid for a local static variable
1670 if we did not do any inter-procedural code movement. */
1671 unsigned used_by_single_function : 1;
1672
1673 private:
1674 /* Assemble thunks and aliases associated to varpool node. */
1675 void assemble_aliases (void);
1676 };
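
/* Illustrative sketch (not part of the GCC sources): registering a global
   VAR_DECL with the varpool and fetching its constructor for folding, using
   only members declared in this class.  DECL is a hypothetical VAR_DECL
   produced elsewhere.

     varpool_node::add (decl);
     varpool_node *vnode = varpool_node::get (decl);
     if (vnode && vnode->ctor_useable_for_folding_p ())
       {
         tree ctor = vnode->get_constructor ();
         gcc_assert (ctor != NULL_TREE);
       }  */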
1677
1678 /* Every top level asm statement is put into an asm_node. */
1679
1680 struct GTY(()) asm_node {
1681
1682
1683 /* Next asm node. */
1684 asm_node *next;
1685 /* String for this asm node. */
1686 tree asm_str;
1687 /* Ordering of all cgraph nodes. */
1688 int order;
1689 };
1690
1691 /* Report whether or not THIS symtab node is a function, aka cgraph_node. */
1692
1693 template <>
1694 template <>
1695 inline bool
1696 is_a_helper <cgraph_node *>::test (symtab_node *p)
1697 {
1698 return p && p->type == SYMTAB_FUNCTION;
1699 }
1700
1701 /* Report whether or not THIS symtab node is a variable, aka varpool_node. */
1702
1703 template <>
1704 template <>
1705 inline bool
1706 is_a_helper <varpool_node *>::test (symtab_node *p)
1707 {
1708 return p && p->type == SYMTAB_VARIABLE;
1709 }
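
/* Illustrative sketch: the specializations above allow a generic symtab_node
   pointer to be down-cast with dyn_cast / is_a from is-a.h.  NODE is a
   hypothetical symtab_node pointer; name () is assumed to be the symtab_node
   accessor declared earlier in this file.

     if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
       fprintf (stderr, "function: %s\n", cnode->name ());
     else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
       fprintf (stderr, "variable: %s\n", vnode->name ());  */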
1710
1711 /* Macros to access the next item in the list of free cgraph nodes and
1712 edges. */
1713 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1714 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1715 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1716
1717 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1718 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1719 typedef void (*varpool_node_hook)(varpool_node *, void *);
1720 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1721 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1722
1723 struct cgraph_edge_hook_list;
1724 struct cgraph_node_hook_list;
1725 struct varpool_node_hook_list;
1726 struct cgraph_2edge_hook_list;
1727 struct cgraph_2node_hook_list;
1728
1729 /* Map from a symbol to initialization/finalization priorities. */
1730 struct GTY(()) symbol_priority_map {
1731 priority_type init;
1732 priority_type fini;
1733 };
1734
1735 enum symtab_state
1736 {
1737 /* Frontend is parsing and finalizing functions. */
1738 PARSING,
1739 /* Callgraph is being constructed. It is safe to add new functions. */
1740 CONSTRUCTION,
1741 /* Callgraph is being streamed in at LTO time. */
1742 LTO_STREAMING,
1743 /* Callgraph is built and IPA passes are being run. */
1744 IPA,
1745 /* Callgraph is built and all functions are transformed to SSA form. */
1746 IPA_SSA,
1747 /* Functions are now ordered and being passed to RTL expanders. */
1748 EXPANSION,
1749 /* All cgraph expansion is done. */
1750 FINISHED
1751 };
1752
1753 struct asmname_hasher
1754 {
1755 typedef symtab_node *value_type;
1756 typedef const_tree compare_type;
1757 typedef int store_values_directly;
1758
1759 static hashval_t hash (symtab_node *n);
1760 static bool equal (symtab_node *n, const_tree t);
1761 static void ggc_mx (symtab_node *n);
1762 static void pch_nx (symtab_node *&);
1763 static void pch_nx (symtab_node *&, gt_pointer_operator, void *);
1764 static void remove (symtab_node *) {}
1765 };
1766
1767 class GTY((tag ("SYMTAB"))) symbol_table
1768 {
1769 public:
1770 friend class symtab_node;
1771 friend class cgraph_node;
1772 friend class cgraph_edge;
1773
1774 /* Initialize callgraph dump file. */
1775 inline void
1776 initialize (void)
1777 {
1778 if (!dump_file)
1779 dump_file = dump_begin (TDI_cgraph, NULL);
1780 }
1781
1782 /* Register a top-level asm statement ASM_STR. */
1783 inline asm_node *finalize_toplevel_asm (tree asm_str);
1784
1785 /* Analyze the whole compilation unit once it is parsed completely. */
1786 void finalize_compilation_unit (void);
1787
1788 /* The C++ frontend produces same-body aliases all over the place, even before
1789 PCH gets streamed out. It relies on us linking the aliases with their
1790 functions in order to do the fixups, but ipa-ref is not PCH safe.
1791 Consequently we first produce aliases without links, and once the C++ FE is
1792 sure it won't stream PCH we build the links via this function. */
1793 void process_same_body_aliases (void);
1794
1795 /* Perform simple optimizations based on callgraph. */
1796 void compile (void);
1797
1798 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
1799 functions into callgraph in a way so they look like ordinary reachable
1800 functions inserted into callgraph already at construction time. */
1801 void process_new_functions (void);
1802
1803 /* Once all functions from the compilation unit are in memory, produce all
1804 clones and update all calls. We might also do this on demand if we don't
1805 want to bring all functions to memory prior to compilation, but the current
1806 WHOPR implementation does that and it is a bit easier to keep everything
1807 right in this order. */
1808 void materialize_all_clones (void);
1809
1810 /* Register a symbol NODE. */
1811 inline void register_symbol (symtab_node *node);
1812
1813 inline void
1814 clear_asm_symbols (void)
1815 {
1816 asmnodes = NULL;
1817 asm_last_node = NULL;
1818 }
1819
1820 /* Perform reachability analysis and reclaim all unreachable nodes. */
1821 bool remove_unreachable_nodes (bool before_inlining_p, FILE *file);
1822
1823 /* Optimization of function bodies might've rendered some variables
1824 unnecessary, so we want to avoid compiling them. Re-do
1825 reachability starting from variables that are either externally visible
1826 or were referred to from the asm output routines. */
1827 void remove_unreferenced_decls (void);
1828
1829 /* Unregister a symbol NODE. */
1830 inline void unregister (symtab_node *node);
1831
1832 /* Allocate new callgraph node and insert it into basic data structures. */
1833 cgraph_node *create_empty (void);
1834
1835 /* Release a callgraph NODE with UID and put it into the list
1836 of free nodes. */
1837 void release_symbol (cgraph_node *node, int uid);
1838
1839 /* Output all variables enqueued to be assembled. */
1840 bool output_variables (void);
1841
1842 /* Weakrefs may be associated with external decls and thus not output
1843 at expansion time. Emit all necessary aliases. */
1844 void output_weakrefs (void);
1845
1846 /* Return first symbol. */
1847 inline symtab_node *first_symbol (void);
1848
1849 /* Return first assembler symbol. */
1850 inline asm_node *
1851 first_asm_symbol (void)
1852 {
1853 return asmnodes;
1854 }
1855
1856 /* Return first static symbol with definition. */
1857 inline symtab_node *first_defined_symbol (void);
1858
1859 /* Return first variable. */
1860 inline varpool_node *first_variable (void);
1861
1862 /* Return next variable after NODE. */
1863 inline varpool_node *next_variable (varpool_node *node);
1864
1865 /* Return first static variable with initializer. */
1866 inline varpool_node *first_static_initializer (void);
1867
1868 /* Return next static variable with initializer after NODE. */
1869 inline varpool_node *next_static_initializer (varpool_node *node);
1870
1871 /* Return first static variable with definition. */
1872 inline varpool_node *first_defined_variable (void);
1873
1874 /* Return next static variable with definition after NODE. */
1875 inline varpool_node *next_defined_variable (varpool_node *node);
1876
1877 /* Return first function with body defined. */
1878 inline cgraph_node *first_defined_function (void);
1879
1880 /* Return next function with body defined after NODE. */
1881 inline cgraph_node *next_defined_function (cgraph_node *node);
1882
1883 /* Return first function. */
1884 inline cgraph_node *first_function (void);
1885
1886 /* Return next function. */
1887 inline cgraph_node *next_function (cgraph_node *node);
1888
1889 /* Return first function with a gimple body. */
1890 cgraph_node *first_function_with_gimple_body (void);
1891
1892 /* Return next function with a gimple body after NODE. */
1893 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
1894
1895 /* Register HOOK to be called with DATA on each removed edge. */
1896 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
1897 void *data);
1898
1899 /* Remove ENTRY from the list of hooks called on removing edges. */
1900 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
1901
1902 /* Register HOOK to be called with DATA on each removed node. */
1903 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
1904 void *data);
1905
1906 /* Remove ENTRY from the list of hooks called on removing nodes. */
1907 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
1908
1909 /* Register HOOK to be called with DATA on each removed node. */
1910 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
1911 void *data);
1912
1913 /* Remove ENTRY from the list of hooks called on removing nodes. */
1914 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
1915
1916 /* Register HOOK to be called with DATA on each inserted node. */
1917 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
1918 void *data);
1919
1920 /* Remove ENTRY from the list of hooks called on inserted nodes. */
1921 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
1922
1923 /* Register HOOK to be called with DATA on each inserted node. */
1924 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
1925 void *data);
1926
1927 /* Remove ENTRY from the list of hooks called on inserted nodes. */
1928 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
1929
1930 /* Register HOOK to be called with DATA on each duplicated edge. */
1931 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
1932 void *data);
1933 /* Remove ENTRY from the list of hooks called on duplicating edges. */
1934 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
1935
1936 /* Register HOOK to be called with DATA on each duplicated node. */
1937 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
1938 void *data);
1939
1940 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
1941 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
1942
1943 /* Call all edge removal hooks. */
1944 void call_edge_removal_hooks (cgraph_edge *e);
1945
1946 /* Call all node insertion hooks. */
1947 void call_cgraph_insertion_hooks (cgraph_node *node);
1948
1949 /* Call all node removal hooks. */
1950 void call_cgraph_removal_hooks (cgraph_node *node);
1951
1952 /* Call all node duplication hooks. */
1953 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
1954
1955 /* Call all edge duplication hooks. */
1956 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
1957
1958 /* Call all node removal hooks. */
1959 void call_varpool_removal_hooks (varpool_node *node);
1960
1961 /* Call all node insertion hooks. */
1962 void call_varpool_insertion_hooks (varpool_node *node);
1963
1964 /* Arrange node to be first in its entry of assembler_name_hash. */
1965 void symtab_prevail_in_asm_name_hash (symtab_node *node);
1966
1967 /* Initialize the asm name hash unless it is already initialized. */
1968 void symtab_initialize_asm_name_hash (void);
1969
1970 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
1971 void change_decl_assembler_name (tree decl, tree name);
1972
1973 int cgraph_count;
1974 int cgraph_max_uid;
1975
1976 int edges_count;
1977 int edges_max_uid;
1978
1979 symtab_node* GTY(()) nodes;
1980 asm_node* GTY(()) asmnodes;
1981 asm_node* GTY(()) asm_last_node;
1982 cgraph_node* GTY(()) free_nodes;
1983
1984 /* Head of a linked list of unused (freed) call graph edges.
1985 Do not GTY((delete)) this list so UIDs get reliably recycled. */
1986 cgraph_edge * GTY(()) free_edges;
1987
1988 /* The order index of the next symtab node to be created. This is
1989 used so that we can sort the cgraph nodes in order by when we saw
1990 them, to support -fno-toplevel-reorder. */
1991 int order;
1992
1993 /* Set when whole unit has been analyzed so we can access global info. */
1994 bool global_info_ready;
1995 /* What state callgraph is in right now. */
1996 enum symtab_state state;
1997 /* Set when the cgraph is fully built and the basic flags are computed. */
1998 bool function_flags_ready;
1999
2000 bool cpp_implicit_aliases_done;
2001
2002 /* Hash table used to hold sections. */
2003 hash_table<section_name_hasher> *GTY(()) section_hash;
2004
2005 /* Hash table used to convert assembler names into nodes. */
2006 hash_table<asmname_hasher> *assembler_name_hash;
2007
2008 /* Hash table used to hold init priorities. */
2009 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2010
2011 FILE* GTY ((skip)) dump_file;
2012
2013 private:
2014 /* Allocate new callgraph node. */
2015 inline cgraph_node * allocate_cgraph_symbol (void);
2016
2017 /* Allocate a cgraph_edge structure and fill it with data according to the
2018 parameters of which only CALLEE can be NULL (when creating an indirect call
2019 edge). */
2020 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2021 gimple call_stmt, gcov_type count, int freq,
2022 bool indir_unknown_callee);
2023
2024 /* Put the edge onto the free list. */
2025 void free_edge (cgraph_edge *e);
2026
2027 /* Insert NODE to assembler name hash. */
2028 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2029
2030 /* Remove NODE from assembler name hash. */
2031 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2032
2033 /* Hash asmnames ignoring the user specified marks. */
2034 static hashval_t decl_assembler_name_hash (const_tree asmname);
2035
2036 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2037 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2038
2039 friend struct asmname_hasher;
2040
2041 /* List of hooks triggered when an edge is removed. */
2042 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2043 /* List of hooks triggered when a cgraph node is removed. */
2044 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2045 /* List of hooks triggered when an edge is duplicated. */
2046 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2047 /* List of hooks triggered when a node is duplicated. */
2048 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2049 /* List of hooks triggered when a function is inserted. */
2050 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2051 /* List of hooks triggered when a variable is inserted. */
2052 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2053 /* List of hooks triggered when a node is removed. */
2054 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2055 };
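
/* Illustrative sketch (not part of the GCC sources): registering and later
   removing a node-removal hook on the global symbol table declared below.
   The callback matches the cgraph_node_hook typedef above; its body is
   hypothetical, and the unregister call would normally happen once the pass
   that installed the hook is finished.

     static void
     note_removed_node (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
     {
       fprintf (stderr, "removing %s\n", node->name ());
     }

     cgraph_node_hook_list *hook
       = symtab->add_cgraph_removal_hook (note_removed_node, NULL);
     symtab->remove_cgraph_removal_hook (hook);  */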
2056
2057 extern GTY(()) symbol_table *symtab;
2058
2059 extern vec<cgraph_node *> cgraph_new_nodes;
2060
2061 inline hashval_t
2062 asmname_hasher::hash (symtab_node *n)
2063 {
2064 return symbol_table::decl_assembler_name_hash
2065 (DECL_ASSEMBLER_NAME (n->decl));
2066 }
2067
2068 inline bool
2069 asmname_hasher::equal (symtab_node *n, const_tree t)
2070 {
2071 return symbol_table::decl_assembler_name_equal (n->decl, t);
2072 }
2073
2074 extern void gt_ggc_mx (symtab_node *&);
2075
2076 inline void
2077 asmname_hasher::ggc_mx (symtab_node *n)
2078 {
2079 gt_ggc_mx (n);
2080 }
2081
2082 extern void gt_pch_nx (symtab_node *&);
2083
2084 inline void
2085 asmname_hasher::pch_nx (symtab_node *&n)
2086 {
2087 gt_pch_nx (n);
2088 }
2089
2090 inline void
2091 asmname_hasher::pch_nx (symtab_node *&n, gt_pointer_operator op, void *cookie)
2092 {
2093 op (&n, cookie);
2094 }
2095
2096 /* In cgraph.c */
2097 void release_function_body (tree);
2098 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2099
2100 void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
2101 bool cgraph_function_possibly_inlined_p (tree);
2102
2103 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2104 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2105
2106 bool resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution);
2107 extern bool gimple_check_call_matching_types (gimple, tree, bool);
2108
2109 /* In cgraphunit.c */
2110 /* Initialize datastructures so DECL is a function in lowered gimple form.
2111 IN_SSA is true if the gimple is in SSA. */
2112 basic_block init_lowered_empty_function (tree, bool);
2113
2114 /* In cgraphclones.c */
2115
2116 tree clone_function_name (tree decl, const char *);
2117
2118 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2119 bool, bitmap, bool, bitmap, basic_block);
2120
2121 /* In cgraphbuild.c */
2122 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2123 void record_references_in_initializer (tree, bool);
2124
2125 /* In ipa.c */
2126 void cgraph_build_static_cdtor (char which, tree body, int priority);
2127 void ipa_discover_readonly_nonaddressable_vars (void);
2128
2129 /* In varpool.c */
2130 tree ctor_for_folding (tree);
2131
2132 /* Return true when the symbol is a real symbol, i.e. it is not an inline
2133 clone or an abstract function kept for debug info purposes only. */
2134 inline bool
2135 symtab_node::real_symbol_p (void)
2136 {
2137 cgraph_node *cnode;
2138
2139 if (DECL_ABSTRACT_P (decl))
2140 return false;
2141 if (!is_a <cgraph_node *> (this))
2142 return true;
2143 cnode = dyn_cast <cgraph_node *> (this);
2144 if (cnode->global.inlined_to)
2145 return false;
2146 return true;
2147 }
2148
2149 /* Return true if DECL should have an entry in the symbol table if used.
2150 Those are functions and static & external variables. */
2151
2152 static inline bool
2153 decl_in_symtab_p (const_tree decl)
2154 {
2155 return (TREE_CODE (decl) == FUNCTION_DECL
2156 || (TREE_CODE (decl) == VAR_DECL
2157 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2158 }
2159
2160 inline bool
2161 symtab_node::in_same_comdat_group_p (symtab_node *target)
2162 {
2163 symtab_node *source = this;
2164
2165 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2166 {
2167 if (cn->global.inlined_to)
2168 source = cn->global.inlined_to;
2169 }
2170 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2171 {
2172 if (cn->global.inlined_to)
2173 target = cn->global.inlined_to;
2174 }
2175
2176 return source->get_comdat_group () == target->get_comdat_group ();
2177 }
2178
2179 /* Return node that alias is aliasing. */
2180
2181 inline symtab_node *
2182 symtab_node::get_alias_target (void)
2183 {
2184 ipa_ref *ref = NULL;
2185 iterate_reference (0, ref);
2186 gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2187 return ref->referred;
2188 }
2189
2190 /* Return next symbol with definition after this node. */
2191 inline symtab_node *
2192 symtab_node::next_defined_symbol (void)
2193 {
2194 symtab_node *node1 = next;
2195
2196 for (; node1; node1 = node1->next)
2197 if (node1->definition)
2198 return node1;
2199
2200 return NULL;
2201 }
2202
2203 /* Return varpool node for given symbol and check it is a variable. */
2204
2205 inline varpool_node *
2206 varpool_node::get (const_tree decl)
2207 {
2208 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2209 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2210 }
2211
2212 /* Register a symbol NODE. */
2213
2214 inline void
2215 symbol_table::register_symbol (symtab_node *node)
2216 {
2217 node->next = nodes;
2218 node->previous = NULL;
2219
2220 if (nodes)
2221 nodes->previous = node;
2222 nodes = node;
2223
2224 node->order = order++;
2225 }
2226
2227 /* Register a top-level asm statement ASM_STR. */
2228
2229 asm_node *
2230 symbol_table::finalize_toplevel_asm (tree asm_str)
2231 {
2232 asm_node *node;
2233
2234 node = ggc_cleared_alloc<asm_node> ();
2235 node->asm_str = asm_str;
2236 node->order = order++;
2237 node->next = NULL;
2238
2239 if (asmnodes == NULL)
2240 asmnodes = node;
2241 else
2242 asm_last_node->next = node;
2243
2244 asm_last_node = node;
2245 return node;
2246 }
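
/* Illustrative sketch: how a frontend might register a top-level asm
   statement through the function above.  build_string () is assumed to be
   the STRING_CST constructor from tree.h; the asm text is hypothetical.

     const char *text = "\t.globl\tsome_symbol";
     symtab->finalize_toplevel_asm (build_string (strlen (text), text));  */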
2247
2248 /* Unregister a symbol NODE. */
2249 inline void
2250 symbol_table::unregister (symtab_node *node)
2251 {
2252 if (node->previous)
2253 node->previous->next = node->next;
2254 else
2255 nodes = node->next;
2256
2257 if (node->next)
2258 node->next->previous = node->previous;
2259
2260 node->next = NULL;
2261 node->previous = NULL;
2262 }
2263
2264 /* Allocate new callgraph node and insert it into basic data structures. */
2265
2266 inline cgraph_node *
2267 symbol_table::create_empty (void)
2268 {
2269 cgraph_node *node = allocate_cgraph_symbol ();
2270
2271 node->type = SYMTAB_FUNCTION;
2272 node->frequency = NODE_FREQUENCY_NORMAL;
2273 node->count_materialization_scale = REG_BR_PROB_BASE;
2274 cgraph_count++;
2275
2276 return node;
2277 }
2278
2279 /* Release a callgraph NODE with UID and put it into the list of free nodes. */
2280
2281 inline void
2282 symbol_table::release_symbol (cgraph_node *node, int uid)
2283 {
2284 cgraph_count--;
2285
2286 /* Clear out the node to NULL all pointers and add the node to the free
2287 list. */
2288 memset (node, 0, sizeof (*node));
2289 node->type = SYMTAB_FUNCTION;
2290 node->uid = uid;
2291 SET_NEXT_FREE_NODE (node, free_nodes);
2292 free_nodes = node;
2293 }
2294
2295 /* Allocate new callgraph node. */
2296
2297 inline cgraph_node *
2298 symbol_table::allocate_cgraph_symbol (void)
2299 {
2300 cgraph_node *node;
2301
2302 if (free_nodes)
2303 {
2304 node = free_nodes;
2305 free_nodes = NEXT_FREE_NODE (node);
2306 }
2307 else
2308 {
2309 node = ggc_cleared_alloc<cgraph_node> ();
2310 node->uid = cgraph_max_uid++;
2311 }
2312
2313 return node;
2314 }
2315
2316
2317 /* Return first symbol. */
2318 inline symtab_node *
2319 symbol_table::first_symbol (void)
2320 {
2321 return nodes;
2322 }
2323
2324 /* Walk all symbols. */
2325 #define FOR_EACH_SYMBOL(node) \
2326 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
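
/* Illustrative sketch: dumping the kind and name of every symbol currently
   in the table using the walker above.  name () is assumed to be the
   symtab_node accessor declared earlier in this file.

     symtab_node *snode;
     FOR_EACH_SYMBOL (snode)
       fprintf (stderr, "%s %s\n",
                is_a <cgraph_node *> (snode) ? "function" : "variable",
                snode->name ());  */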
2327
2328 /* Return first static symbol with definition. */
2329 inline symtab_node *
2330 symbol_table::first_defined_symbol (void)
2331 {
2332 symtab_node *node;
2333
2334 for (node = nodes; node; node = node->next)
2335 if (node->definition)
2336 return node;
2337
2338 return NULL;
2339 }
2340
2341 /* Walk all symbols with definitions in current unit. */
2342 #define FOR_EACH_DEFINED_SYMBOL(node) \
2343 for ((node) = symtab->first_defined_symbol (); (node); \
2344 (node) = node->next_defined_symbol ())
2345
2346 /* Return first variable. */
2347 inline varpool_node *
2348 symbol_table::first_variable (void)
2349 {
2350 symtab_node *node;
2351 for (node = nodes; node; node = node->next)
2352 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2353 return vnode;
2354 return NULL;
2355 }
2356
2357 /* Return next variable after NODE. */
2358 inline varpool_node *
2359 symbol_table::next_variable (varpool_node *node)
2360 {
2361 symtab_node *node1 = node->next;
2362 for (; node1; node1 = node1->next)
2363 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2364 return vnode1;
2365 return NULL;
2366 }
2367 /* Walk all variables. */
2368 #define FOR_EACH_VARIABLE(node) \
2369 for ((node) = symtab->first_variable (); \
2370 (node); \
2371 (node) = symtab->next_variable ((node)))
2372
2373 /* Return first static variable with initializer. */
2374 inline varpool_node *
2375 symbol_table::first_static_initializer (void)
2376 {
2377 symtab_node *node;
2378 for (node = nodes; node; node = node->next)
2379 {
2380 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2381 if (vnode && DECL_INITIAL (node->decl))
2382 return vnode;
2383 }
2384 return NULL;
2385 }
2386
2387 /* Return next static variable with initializer after NODE. */
2388 inline varpool_node *
2389 symbol_table::next_static_initializer (varpool_node *node)
2390 {
2391 symtab_node *node1 = node->next;
2392 for (; node1; node1 = node1->next)
2393 {
2394 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2395 if (vnode1 && DECL_INITIAL (node1->decl))
2396 return vnode1;
2397 }
2398 return NULL;
2399 }
2400
2401 /* Walk all static variables with initializer set. */
2402 #define FOR_EACH_STATIC_INITIALIZER(node) \
2403 for ((node) = symtab->first_static_initializer (); (node); \
2404 (node) = symtab->next_static_initializer (node))
2405
2406 /* Return first static variable with definition. */
2407 inline varpool_node *
2408 symbol_table::first_defined_variable (void)
2409 {
2410 symtab_node *node;
2411 for (node = nodes; node; node = node->next)
2412 {
2413 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2414 if (vnode && vnode->definition)
2415 return vnode;
2416 }
2417 return NULL;
2418 }
2419
2420 /* Return next static variable with definition after NODE. */
2421 inline varpool_node *
2422 symbol_table::next_defined_variable (varpool_node *node)
2423 {
2424 symtab_node *node1 = node->next;
2425 for (; node1; node1 = node1->next)
2426 {
2427 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2428 if (vnode1 && vnode1->definition)
2429 return vnode1;
2430 }
2431 return NULL;
2432 }
2433 /* Walk all variables with definitions in current unit. */
2434 #define FOR_EACH_DEFINED_VARIABLE(node) \
2435 for ((node) = symtab->first_defined_variable (); (node); \
2436 (node) = symtab->next_defined_variable (node))
2437
2438 /* Return first function with body defined. */
2439 inline cgraph_node *
2440 symbol_table::first_defined_function (void)
2441 {
2442 symtab_node *node;
2443 for (node = nodes; node; node = node->next)
2444 {
2445 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2446 if (cn && cn->definition)
2447 return cn;
2448 }
2449 return NULL;
2450 }
2451
2452 /* Return next function with body defined after NODE. */
2453 inline cgraph_node *
2454 symbol_table::next_defined_function (cgraph_node *node)
2455 {
2456 symtab_node *node1 = node->next;
2457 for (; node1; node1 = node1->next)
2458 {
2459 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2460 if (cn1 && cn1->definition)
2461 return cn1;
2462 }
2463 return NULL;
2464 }
2465
2466 /* Walk all functions with body defined. */
2467 #define FOR_EACH_DEFINED_FUNCTION(node) \
2468 for ((node) = symtab->first_defined_function (); (node); \
2469 (node) = symtab->next_defined_function ((node)))
2470
2471 /* Return first function. */
2472 inline cgraph_node *
2473 symbol_table::first_function (void)
2474 {
2475 symtab_node *node;
2476 for (node = nodes; node; node = node->next)
2477 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2478 return cn;
2479 return NULL;
2480 }
2481
2482 /* Return next function. */
2483 inline cgraph_node *
2484 symbol_table::next_function (cgraph_node *node)
2485 {
2486 symtab_node *node1 = node->next;
2487 for (; node1; node1 = node1->next)
2488 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2489 return cn1;
2490 return NULL;
2491 }
2492
2493 /* Return first function with a gimple body. */
2494 inline cgraph_node *
2495 symbol_table::first_function_with_gimple_body (void)
2496 {
2497 symtab_node *node;
2498 for (node = nodes; node; node = node->next)
2499 {
2500 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2501 if (cn && cn->has_gimple_body_p ())
2502 return cn;
2503 }
2504 return NULL;
2505 }
2506
2507 /* Return next function with a gimple body after NODE. */
2508 inline cgraph_node *
2509 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2510 {
2511 symtab_node *node1 = node->next;
2512 for (; node1; node1 = node1->next)
2513 {
2514 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2515 if (cn1 && cn1->has_gimple_body_p ())
2516 return cn1;
2517 }
2518 return NULL;
2519 }
2520
2521 /* Walk all functions. */
2522 #define FOR_EACH_FUNCTION(node) \
2523 for ((node) = symtab->first_function (); (node); \
2524 (node) = symtab->next_function ((node)))
2525
2526 /* Return true when callgraph node is a function with Gimple body defined
2527 in current unit. Functions can also be defined externally or they
2528 can be thunks with no Gimple representation.
2529
2530 Note that at WPA stage, the function body may not be present in memory. */
2531
2532 inline bool
2533 cgraph_node::has_gimple_body_p (void)
2534 {
2535 return definition && !thunk.thunk_p && !alias;
2536 }
2537
2538 /* Walk all functions with body defined. */
2539 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2540 for ((node) = symtab->first_function_with_gimple_body (); (node); \
2541 (node) = symtab->next_function_with_gimple_body (node))
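
/* Illustrative sketch (not part of the GCC sources): visiting every function
   with a gimple body in the current unit and listing its direct callees.
   The callees list head is assumed to be the member declared in the
   cgraph_node class earlier in this file; next_callee comes from
   cgraph_edge above.

     cgraph_node *fn;
     FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (fn)
       for (cgraph_edge *e = fn->callees; e; e = e->next_callee)
         fprintf (stderr, "%s -> %s\n", fn->name (), e->callee->name ());  */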
2542
2543 /* Create a new static variable of type TYPE. */
2544 tree add_new_static_var (tree type);
2545
2546 /* Uniquize all constants that appear in memory.
2547 Each constant in memory thus far output is recorded
2548 in `const_desc_table'. */
2549
2550 struct GTY((for_user)) constant_descriptor_tree {
2551 /* A MEM for the constant. */
2552 rtx rtl;
2553
2554 /* The value of the constant. */
2555 tree value;
2556
2557 /* Hash of value. Computing the hash from value each time
2558 hashfn is called can't work properly, as that means recursive
2559 use of the hash table during hash table expansion. */
2560 hashval_t hash;
2561 };
2562
2563 /* Return true when the function is only called directly or it has an alias,
2564 i.e. it is not externally visible, its address is not taken and
2565 it is not used in any other non-standard way. */
2566
2567 inline bool
2568 cgraph_node::only_called_directly_or_aliased_p (void)
2569 {
2570 gcc_assert (!global.inlined_to);
2571 return (!force_output && !address_taken
2572 && !used_from_other_partition
2573 && !DECL_VIRTUAL_P (decl)
2574 && !DECL_STATIC_CONSTRUCTOR (decl)
2575 && !DECL_STATIC_DESTRUCTOR (decl)
2576 && !externally_visible);
2577 }
2578
2579 /* Return true when variable can be removed from variable pool
2580 if all references to it are eliminated. */
2581
2582 inline bool
2583 varpool_node::can_remove_if_no_refs_p (void)
2584 {
2585 if (DECL_EXTERNAL (decl))
2586 return true;
2587 return (!force_output && !used_from_other_partition
2588 && ((DECL_COMDAT (decl)
2589 && !forced_by_abi
2590 && !used_from_object_file_p ())
2591 || !externally_visible
2592 || DECL_HAS_VALUE_EXPR_P (decl)));
2593 }
2594
2595 /* Return true when all references to variable must be visible in ipa_ref_list.
2596 i.e. if the variable is not externally visible or not used in some magic
2597 way (asm statement or such).
2598 The magic uses are all summarized in force_output flag. */
2599
2600 inline bool
2601 varpool_node::all_refs_explicit_p ()
2602 {
2603 return (definition
2604 && !externally_visible
2605 && !used_from_other_partition
2606 && !force_output);
2607 }
2608
2609 struct tree_descriptor_hasher : ggc_hasher<constant_descriptor_tree *>
2610 {
2611 static hashval_t hash (constant_descriptor_tree *);
2612 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2613 };
2614
2615 /* Constant pool accessor function. */
2616 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2617
2618 /* Return node that alias is aliasing. */
2619
2620 inline cgraph_node *
2621 cgraph_node::get_alias_target (void)
2622 {
2623 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2624 }
2625
2626 /* Return node that alias is aliasing. */
2627
2628 inline varpool_node *
2629 varpool_node::get_alias_target (void)
2630 {
2631 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2632 }
2633
2634 /* Given a function symbol, walk the alias chain to return the function
2635 the node is an alias of. Do not walk through thunks.
2636 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2637
2638 inline cgraph_node *
2639 cgraph_node::ultimate_alias_target (enum availability *availability)
2640 {
2641 cgraph_node *n = dyn_cast <cgraph_node *> (symtab_node::ultimate_alias_target
2642 (availability));
2643 if (!n && availability)
2644 *availability = AVAIL_NOT_AVAILABLE;
2645 return n;
2646 }
2647
2648 /* For given variable pool node, walk the alias chain to return the node
2649 the variable is an alias of. Do not walk through thunks.
2650 When AVAILABILITY is non-NULL, get minimal availability in the chain. */
2651
2652 inline varpool_node *
2653 varpool_node::ultimate_alias_target (availability *availability)
2654 {
2655 varpool_node *n = dyn_cast <varpool_node *>
2656 (symtab_node::ultimate_alias_target (availability));
2657
2658 if (!n && availability)
2659 *availability = AVAIL_NOT_AVAILABLE;
2660 return n;
2661 }
2662
2663 /* Return true when the edge represents a direct recursion. */
2664 inline bool
2665 cgraph_edge::recursive_p (void)
2666 {
2667 cgraph_node *c = callee->ultimate_alias_target ();
2668 if (caller->global.inlined_to)
2669 return caller->global.inlined_to->decl == c->decl;
2670 else
2671 return caller->decl == c->decl;
2672 }
2673
2674 /* Return true if the TM_CLONE bit is set for a given FNDECL. */
2675 static inline bool
2676 decl_is_tm_clone (const_tree fndecl)
2677 {
2678 cgraph_node *n = cgraph_node::get (fndecl);
2679 if (n)
2680 return n->tm_clone;
2681 return false;
2682 }
2683
2684 /* Indicate that a node is needed, i.e. reachable via some
2685 external means. */
2686
2687 inline void
2688 cgraph_node::mark_force_output (void)
2689 {
2690 force_output = 1;
2691 gcc_checking_assert (!global.inlined_to);
2692 }
2693
2694 inline symtab_node * symtab_node::get_create (tree node)
2695 {
2696 if (TREE_CODE (node) == VAR_DECL)
2697 return varpool_node::get_create (node);
2698 else
2699 return cgraph_node::get_create (node);
2700 }
2701
2702 /* Build polymorphic call context for indirect call E. */
2703
2704 inline
2705 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
2706 {
2707 gcc_checking_assert (e->indirect_info->polymorphic);
2708 *this = e->indirect_info->context;
2709 }
2710
2711 /* Build empty "I know nothing" context. */
2712
2713 inline
2714 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
2715 {
2716 clear_speculation ();
2717 clear_outer_type ();
2718 invalid = false;
2719 }
2720
2721 /* Make context non-speculative. */
2722
2723 inline void
2724 ipa_polymorphic_call_context::clear_speculation ()
2725 {
2726 speculative_outer_type = NULL;
2727 speculative_offset = 0;
2728 speculative_maybe_derived_type = false;
2729 }
2730
2731 /* Produce context specifying all derived types of OTR_TYPE.
2732 If OTR_TYPE is NULL or type of the OBJ_TYPE_REF, the context is set
2733 to dummy "I know nothing" setting. */
2734
2735 inline void
2736 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
2737 {
2738 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
2739 offset = 0;
2740 maybe_derived_type = true;
2741 maybe_in_construction = true;
2742 dynamic = true;
2743 }
2744
2745 /* Adjust all offsets in contexts by OFF bits. */
2746
2747 inline void
2748 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
2749 {
2750 if (outer_type)
2751 offset += off;
2752 if (speculative_outer_type)
2753 speculative_offset += off;
2754 }
2755
2756 /* Return TRUE if context is fully useless. */
2757
2758 inline bool
2759 ipa_polymorphic_call_context::useless_p () const
2760 {
2761 return (!outer_type && !speculative_outer_type);
2762 }
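
/* Illustrative sketch: building the polymorphic call context of an indirect
   polymorphic edge E with the constructor above and adjusting it with the
   inline helpers.  E and the bit offset OFF are hypothetical.

     if (e->indirect_info->polymorphic)
       {
         ipa_polymorphic_call_context ctx (e);
         ctx.offset_by (off);
         if (!ctx.useless_p ())
           fprintf (stderr, "context still carries outer type info\n");
       }  */
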
2763 #endif /* GCC_CGRAPH_H */