/* Callgraph handling code.
- Copyright (C) 2003-2016 Free Software Foundation, Inc.
+ Copyright (C) 2003-2017 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
#ifndef GCC_CGRAPH_H
#define GCC_CGRAPH_H
+#include "profile-count.h"
#include "ipa-ref.h"
#include "plugin-api.h"
/* Return name. */
const char *name () const;
+ /* Return dump name. */
+ const char *dump_name () const;
+
/* Return asm name. */
- const char * asm_name () const;
+ const char *asm_name () const;
+
+ /* Return dump name with assembler name. */
+ const char *dump_asm_name () const;
/* Add node into symbol table. This function is not used directly, but via
cgraph/varpool node creation routines. */
enum ipa_ref_use use_type, gimple *stmt);
/* If VAL is a reference to a function or a variable, add a reference from
- this symtab_node to the corresponding symbol table node. USE_TYPE specify
- type of the use and STMT the statement (if it exists). Return the new
+ this symtab_node to the corresponding symbol table node. Return the new
reference or NULL if none was created. */
- ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
- gimple *stmt);
+ ipa_ref *maybe_create_reference (tree val, gimple *stmt);
/* Clone all references from symtab NODE to this symtab_node. */
void clone_references (symtab_node *node);
/* Walk the alias chain to return the symbol NODE is alias of.
If NODE is not an alias, return NODE.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
- symtab_node *ultimate_alias_target (enum availability *avail = NULL);
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
+ symtab_node *ultimate_alias_target (enum availability *avail = NULL,
+ struct symtab_node *ref = NULL);
/* Return next reachable static symbol with initializer after NODE. */
inline symtab_node *next_defined_symbol (void);
/* Return the initialization priority. */
priority_type get_init_priority ();
- /* Return availability of NODE. */
- enum availability get_availability (void);
+ /* Return availability of NODE when referenced from REF. */
+ enum availability get_availability (symtab_node *ref = NULL);
+
+ /* Return true if NODE binds to current definition in final executable
+     when referenced from REF.  If REF is NULL, return a conservative
+     value for any reference.  */
+ bool binds_to_current_def_p (symtab_node *ref = NULL);
/* Make DECL local. */
void make_decl_local (void);
+ /* Copy visibility from N. */
+ void copy_visibility_from (symtab_node *n);
+
/* Return desired alignment of the definition. This is NOT alignment useful
to access THIS, because THIS may be interposable and DECL_ALIGN should
be used instead. It however must be guaranteed when output definition
configury. This function is used just during symbol creation. */
bool needed_p (void);
+ /* Return true if this symbol is a function from the C frontend specified
+ directly in RTL form (with "__RTL"). */
+ bool native_rtl_p () const;
+
/* Return true when there are references to the node. */
bool referred_to_p (bool include_self = true);
Return NULL if there's no such node. */
static symtab_node *get_for_asmname (const_tree asmname);
- /* Dump symbol table to F. */
- static void dump_table (FILE *);
-
- /* Dump symbol table to stderr. */
- static inline DEBUG_FUNCTION void debug_symtab (void)
- {
- dump_table (stderr);
- }
-
/* Verify symbol table for internal consistency. */
static DEBUG_FUNCTION void verify_symtab_nodes (void);
static bool noninterposable_alias (symtab_node *node, void *data);
/* Worker for ultimate_alias_target. */
- symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL);
+ symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
+ symtab_node *ref = NULL);
+
+ /* Get dump name with normal or assembly name. */
+ const char *get_dump_name (bool asm_name_p) const;
};
inline void
extern const char * const ld_plugin_symbol_resolution_names[];
extern const char * const tls_model_names[];
-/* Information about thunk, used only for same body aliases. */
+/* Sub-structure of cgraph_node. Holds information about thunk, used only for
+ same body aliases.
+
+ Thunks are basically wrappers around methods which are introduced in case
+ of multiple inheritance in order to adjust the value of the "this" pointer
+ or of the returned value.
+
+ In the case of this-adjusting thunks, each back-end can override the
+ can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
+ (with a tail call for instance) directly as assembly. For the default hook
+ or for the case where the can_output_mi_thunk hooks return false, the thunk
+ is gimplified and lowered using the regular machinery. */
struct GTY(()) cgraph_thunk_info {
- /* Information about the thunk. */
+ /* Offset used to adjust "this". */
HOST_WIDE_INT fixed_offset;
+
+ /* Offset in the virtual table to get the offset to adjust "this". Valid iff
+ VIRTUAL_OFFSET_P is true. */
HOST_WIDE_INT virtual_value;
+
+ /* Thunk target, i.e. the method that this thunk wraps. Depending on the
+ TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
tree alias;
+
+ /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
+ thunk. */
bool this_adjusting;
+
+ /* If true, this thunk is what we call a virtual thunk. In this case:
+ * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
+ done, add to the result the offset found in the vtable at:
+ vptr + VIRTUAL_VALUE
+ * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
+ the virtual one. */
bool virtual_offset_p;
+
+ /* ??? True for special kind of thunks, seems related to instrumentation. */
bool add_pointer_bounds_args;
- /* Set to true when alias node is thunk. */
+
+  /* Set to true when alias node (the cgraph_node to which this struct belongs)
+     is a thunk.  Access to any other fields is invalid if this is false.  */
bool thunk_p;
};
/* Max hardware vector size in bits for floating point vectors. */
unsigned int vecsize_float;
+ /* Machine mode of the mask argument(s), if they are to be passed
+ as bitmasks in integer argument(s). VOIDmode if masks are passed
+ as vectors of characteristic type. */
+ machine_mode mask_mode;
+
/* The mangling character for a given vector size. This is used
to determine the ISA mangling bit as specified in the Intel
Vector ABI. */
/* Walk the alias chain to return the function cgraph_node is alias of.
Walk through thunk, too.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
- cgraph_node *function_symbol (enum availability *avail = NULL);
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
+ cgraph_node *function_symbol (enum availability *avail = NULL,
+ struct symtab_node *ref = NULL);
/* Walk the alias chain to return the function cgraph_node is alias of.
Walk through non virtual thunks, too. Thus we return either a function
or a virtual thunk node.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
cgraph_node *function_or_virtual_thunk_symbol
- (enum availability *avail = NULL);
+ (enum availability *avail = NULL,
+ struct symtab_node *ref = NULL);
/* Create node representing clone of N executed COUNT times. Decrease
the execution counts from original node too.
If the new node is being inlined into another one, NEW_INLINED_TO should be
the outline function the new one is (even indirectly) inlined to.
All hooks will see this in node's global.inlined_to, when invoked.
- Can be NULL if the node is not inlined. */
- cgraph_node *create_clone (tree decl, gcov_type count, int freq,
+     Can be NULL if the node is not inlined.  SUFFIX is a string that is
+     appended to the original name.  */
+ cgraph_node *create_clone (tree decl, profile_count count,
bool update_original,
vec<cgraph_edge *> redirect_callers,
bool call_duplication_hook,
cgraph_node *new_inlined_to,
- bitmap args_to_skip);
+ bitmap args_to_skip, const char *suffix = NULL);
/* Create callgraph node clone with new declaration. The actual body will
be copied later at compilation stage. */
If non-NULL BLOCK_TO_COPY determine what basic blocks
was copied to prevent duplications of calls that are dead
- in the clone. */
+ in the clone.
+
+     SUFFIX is a string that is appended to the original name.  */
cgraph_node *create_version_clone (tree new_decl,
vec<cgraph_edge *> redirect_callers,
- bitmap bbs_to_copy);
+ bitmap bbs_to_copy,
+ const char *suffix = NULL);
/* Perform function versioning.
Function versioning includes copying of the tree and
/* Add thunk alias into callgraph. The alias declaration is ALIAS and it
aliases DECL with an adjustments made into the first parameter.
- See comments in thunk_adjust for detail on the parameters. */
+ See comments in struct cgraph_thunk_info for detail on the parameters. */
cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
HOST_WIDE_INT fixed_offset,
HOST_WIDE_INT virtual_value,
/* Given function symbol, walk the alias chain to return the function node
is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
- cgraph_node *ultimate_alias_target (availability *availability = NULL);
+ cgraph_node *ultimate_alias_target (availability *availability = NULL,
+ symtab_node *ref = NULL);
/* Expand thunk NODE to gimple if possible.
When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
/* Create edge from a given function to CALLEE in the cgraph. */
cgraph_edge *create_edge (cgraph_node *callee,
- gcall *call_stmt, gcov_type count,
- int freq);
+ gcall *call_stmt, profile_count count);
/* Create an indirect edge with a yet-undetermined callee where the call
statement destination is a formal parameter of the caller with index
PARAM_INDEX. */
cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
- gcov_type count, int freq,
+ profile_count count,
bool compute_indirect_info = true);
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
update the edge same way as cgraph_set_call_stmt_including_clones does. */
void create_edge_including_clones (cgraph_node *callee,
gimple *old_stmt, gcall *stmt,
- gcov_type count,
- int freq,
+ profile_count count,
cgraph_inline_failed_t reason);
/* Return the callgraph edge representing the GIMPLE_CALL statement
/* Return function availability. See cgraph.h for description of individual
return values. */
- enum availability get_availability (void);
+ enum availability get_availability (symtab_node *ref = NULL);
/* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
if any to NOTHROW. */
- void set_nothrow_flag (bool nothrow);
+ bool set_nothrow_flag (bool nothrow);
+
+ /* SET DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
+ if any. */
+ bool set_malloc_flag (bool malloc_p);
- /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
- if any to READONLY. */
- void set_const_flag (bool readonly, bool looping);
+ /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
+     If SET_CONST is false, clear the flag.
+
+     When setting the flag be careful about possible interposition and
+     do not set the flag for functions that can be interposed and set the
+     pure flag for functions that can bind to other definitions.
+
+ Return true if any change was done. */
+
+ bool set_const_flag (bool set_const, bool looping);
/* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
- if any to PURE. */
- void set_pure_flag (bool pure, bool looping);
+ if any to PURE.
+
+ When setting the flag, be careful about possible interposition.
+ Return true if any change was done. */
+
+ bool set_pure_flag (bool pure, bool looping);
/* Call callback on function and aliases associated to the function.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
DECL is a duplicate declaration. */
- static void delete_function_version (tree decl);
+ static void delete_function_version_by_decl (tree decl);
/* Add the function FNDECL to the call graph.
Unlike finalize_function, this function is intended to be used
cgraph_thunk_info thunk;
/* Expected number of executions: calculated in profile.c. */
- gcov_type count;
+ profile_count count;
/* How to scale counts at materialization time; used to merge
LTO units with different number of profile runs. */
int count_materialization_scale;
/* True if this decl is a dispatcher for function versions. */
unsigned dispatcher_function : 1;
/* True if this decl calls a COMDAT-local function. This is set up in
- compute_inline_parameters and inline_call. */
+ compute_fn_summary and inline_call. */
unsigned calls_comdat_local : 1;
/* True if node has been created by merge operation in IPA-ICF. */
unsigned icf_merged: 1;
unsigned parallelized_function : 1;
/* True if function is part split out by ipa-split. */
unsigned split_part : 1;
+ /* True if the function appears as possible target of indirect call. */
+ unsigned indirect_call_target : 1;
private:
/* Worker for call_for_symbol_and_aliases. */
Return true if resulting context is valid.
When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
- valid only via alocation of new polymorphic type inside by means
+ valid only via allocation of new polymorphic type inside by means
of placement new.
When CONSIDER_BASES is false, only look for actual fields, not base types
unsigned agg_contents : 1;
/* Set when this is a call through a member pointer. */
unsigned member_ptr : 1;
- /* When the previous bit is set, this one determines whether the destination
- is loaded from a parameter passed by reference. */
+ /* When the agg_contents bit is set, this one determines whether the
+ destination is loaded from a parameter passed by reference. */
unsigned by_ref : 1;
+ /* When the agg_contents bit is set, this one determines whether we can
+ deduce from the function body that the loaded value from the reference is
+ never modified between the invocation of the function and the load
+ point. */
+ unsigned guaranteed_unmodified : 1;
/* For polymorphic calls this specify whether the virtual table pointer
may have changed in between function entry and the call. */
unsigned vptr_changed : 1;
/* Turn edge into speculative call calling N2. Update
the profile so the direct call is taken COUNT times
with FREQUENCY. */
- cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
- int direct_frequency);
+ cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);
/* Given speculative call edge, return all three components. */
void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
/* Create clone of edge in the node N represented
by CALL_EXPR the callgraph. */
cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
- gcov_type count_scale, int freq_scale, bool update_original);
+ profile_count num, profile_count den,
+ bool update_original);
/* Verify edge count and frequency. */
- bool verify_count_and_frequency ();
+ bool verify_count ();
/* Return true when call of edge can not lead to return from caller
and thus it is safe to ignore its side effects for IPA analysis
static void rebuild_references (void);
/* Expected number of executions: calculated in profile.c. */
- gcov_type count;
+ profile_count count;
cgraph_node *caller;
cgraph_node *callee;
cgraph_edge *prev_caller;
/* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
when the function is serialized in. */
unsigned int lto_stmt_uid;
- /* Expected frequency of executions within the function.
- When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
- per function call. The range is 0 to CGRAPH_FREQ_MAX. */
- int frequency;
/* Unique id of the edge. */
int uid;
/* Whether this edge was made direct by indirect inlining. */
type. */
unsigned in_polymorphic_cdtor : 1;
+ /* Return true if call must bind to current definition. */
+ bool binds_to_current_def_p ();
+
+ /* Expected frequency of executions within the function.
+ When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
+ per function call. The range is 0 to CGRAPH_FREQ_MAX. */
+ int frequency ();
+
+ /* Expected frequency of executions within the function. */
+ sreal sreal_frequency ();
private:
/* Remove the edge from the list of the callers of the callee. */
void remove_caller (void);
void analyze (void);
/* Return variable availability. */
- availability get_availability (void);
+ availability get_availability (symtab_node *ref = NULL);
/* When doing LTO, read variable's constructor from disk if
it is not already present. */
/* For given variable pool node, walk the alias chain to return the function
the variable is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline varpool_node *ultimate_alias_target
- (availability *availability = NULL);
+ (availability *availability = NULL, symtab_node *ref = NULL);
/* Return node that alias is aliasing. */
inline varpool_node *get_alias_target (void);
/* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
void change_decl_assembler_name (tree decl, tree name);
+ /* Dump symbol table to F. */
+ void dump (FILE *f);
+
+ /* Dump symbol table to stderr. */
+ inline DEBUG_FUNCTION void debug (void)
+ {
+ dump (stderr);
+ }
+
/* Return true if assembler names NAME1 and NAME2 leads to the same symbol
name. */
static bool assembler_names_equal_p (const char *name1, const char *name2);
FILE* GTY ((skip)) dump_file;
+ /* Return symbol used to separate symbol name from suffix. */
+ static char symbol_suffix_separator ();
+
+ FILE* GTY ((skip)) ipa_clones_dump_file;
+
+ hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
+
private:
/* Allocate new callgraph node. */
inline cgraph_node * allocate_cgraph_symbol (void);
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
- gcall *call_stmt, gcov_type count, int freq,
+ gcall *call_stmt, profile_count count,
bool indir_unknown_callee);
/* Put the edge onto the free list. */
/* Initialize datastructures so DECL is a function in lowered gimple form.
IN_SSA is true if the gimple is in SSA. */
-basic_block init_lowered_empty_function (tree, bool, gcov_type);
+basic_block init_lowered_empty_function (tree, bool, profile_count);
+tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
/* In cgraphclones.c */
tree clone_function_name_1 (const char *, const char *);
void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
bool, bitmap, bool, bitmap, basic_block);
+void dump_callgraph_transformation (const cgraph_node *original,
+ const cgraph_node *clone,
+ const char *suffix);
+tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
+ bool skip_return);
+
/* In cgraphbuild.c */
int compute_call_stmt_bb_frequency (tree, basic_block bb);
void record_references_in_initializer (tree, bool);
/* In tree-chkp.c */
extern bool chkp_function_instrumented_p (tree fndecl);
+/* In ipa-inline-analysis.c */
+void initialize_inline_failed (struct cgraph_edge *);
+bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
+
/* Return true when the symbol is real symbol, i.e. it is not inline clone
or abstract function kept for debug info purposes only. */
inline bool
/* Walk the alias chain to return the symbol NODE is alias of.
If NODE is not an alias, return NODE.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline symtab_node *
-symtab_node::ultimate_alias_target (enum availability *availability)
+symtab_node::ultimate_alias_target (enum availability *availability,
+ symtab_node *ref)
{
if (!alias)
{
if (availability)
- *availability = get_availability ();
+ *availability = get_availability (ref);
return this;
}
- return ultimate_alias_target_1 (availability);
+ return ultimate_alias_target_1 (availability, ref);
}
/* Given function symbol, walk the alias chain to return the function node
is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline cgraph_node *
-cgraph_node::ultimate_alias_target (enum availability *availability)
+cgraph_node::ultimate_alias_target (enum availability *availability,
+ symtab_node *ref)
{
cgraph_node *n = dyn_cast <cgraph_node *>
- (symtab_node::ultimate_alias_target (availability));
+ (symtab_node::ultimate_alias_target (availability, ref));
if (!n && availability)
*availability = AVAIL_NOT_AVAILABLE;
return n;
/* For given variable pool node, walk the alias chain to return the function
the variable is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline varpool_node *
-varpool_node::ultimate_alias_target (availability *availability)
+varpool_node::ultimate_alias_target (availability *availability,
+ symtab_node *ref)
{
varpool_node *n = dyn_cast <varpool_node *>
- (symtab_node::ultimate_alias_target (availability));
+ (symtab_node::ultimate_alias_target (availability, ref));
if (!n && availability)
*availability = AVAIL_NOT_AVAILABLE;
callee->callers = next_caller;
}
+/* Return true if call must bind to current definition. */
+
+inline bool
+cgraph_edge::binds_to_current_def_p ()
+{
+ if (callee)
+ return callee->binds_to_current_def_p (caller);
+ else
+ return false;
+}
+
+/* Expected frequency of executions within the function.
+ When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
+ per function call. The range is 0 to CGRAPH_FREQ_MAX. */
+
+inline int
+cgraph_edge::frequency ()
+{
+ return count.to_cgraph_frequency (caller->global.inlined_to
+ ? caller->global.inlined_to->count
+ : caller->count);
+}
+
+
/* Return true if the TM_CLONE bit is set for a given FNDECL. */
static inline bool
decl_is_tm_clone (const_tree fndecl)
return cgraph_node::get_create (node);
}
-/* Return availability of NODE. */
+/* Return availability of NODE when referenced from REF. */
inline enum availability
-symtab_node::get_availability (void)
+symtab_node::get_availability (symtab_node *ref)
{
if (is_a <cgraph_node *> (this))
- return dyn_cast <cgraph_node *> (this)->get_availability ();
+ return dyn_cast <cgraph_node *> (this)->get_availability (ref);
else
- return dyn_cast <varpool_node *> (this)->get_availability ();
+ return dyn_cast <varpool_node *> (this)->get_availability (ref);
}
/* Call calback on symtab node and aliases associated to this node.
- When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
- skipped. */
+ When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
inline bool
symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
void *data,
bool include_overwritable)
{
- if (callback (this, data))
- return true;
+ if (include_overwritable
+ || get_availability () > AVAIL_INTERPOSABLE)
+ {
+ if (callback (this, data))
+ return true;
+ }
if (has_aliases_p ())
return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
return false;
}
/* Call callback on function and aliases associated to the function.
- When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ When INCLUDE_OVERWRITABLE is false, overwritable symbols are
skipped. */
inline bool
void *data,
bool include_overwritable)
{
- if (callback (this, data))
- return true;
+ if (include_overwritable
+ || get_availability () > AVAIL_INTERPOSABLE)
+ {
+ if (callback (this, data))
+ return true;
+ }
if (has_aliases_p ())
return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
return false;
}
/* Call calback on varpool symbol and aliases associated to varpool symbol.
- When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ When INCLUDE_OVERWRITABLE is false, overwritable symbols are
skipped. */
inline bool
void *data,
bool include_overwritable)
{
- if (callback (this, data))
- return true;
+ if (include_overwritable
+ || get_availability () > AVAIL_INTERPOSABLE)
+ {
+ if (callback (this, data))
+ return true;
+ }
if (has_aliases_p ())
return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
return false;