/* Callgraph handling code.
- Copyright (C) 2003-2016 Free Software Foundation, Inc.
+ Copyright (C) 2003-2018 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
#ifndef GCC_CGRAPH_H
#define GCC_CGRAPH_H
+#include "profile-count.h"
#include "ipa-ref.h"
#include "plugin-api.h"
symtab_node
{
public:
+ friend class symbol_table;
+
/* Return name. */
const char *name () const;
+ /* Return dump name. */
+ const char *dump_name () const;
+
/* Return asm name. */
- const char * asm_name () const;
+ const char *asm_name () const;
+
+ /* Return dump name with assembler name. */
+ const char *dump_asm_name () const;
/* Add node into symbol table. This function is not used directly, but via
cgraph/varpool node creation routines. */
enum ipa_ref_use use_type, gimple *stmt);
/* If VAL is a reference to a function or a variable, add a reference from
- this symtab_node to the corresponding symbol table node. USE_TYPE specify
- type of the use and STMT the statement (if it exists). Return the new
+ this symtab_node to the corresponding symbol table node. Return the new
reference or NULL if none was created. */
- ipa_ref *maybe_create_reference (tree val, enum ipa_ref_use use_type,
- gimple *stmt);
+ ipa_ref *maybe_create_reference (tree val, gimple *stmt);
/* Clone all references from symtab NODE to this symtab_node. */
void clone_references (symtab_node *node);
/* Walk the alias chain to return the symbol NODE is alias of.
If NODE is not an alias, return NODE.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
- symtab_node *ultimate_alias_target (enum availability *avail = NULL);
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
+ symtab_node *ultimate_alias_target (enum availability *avail = NULL,
+ struct symtab_node *ref = NULL);
/* Return next reachable static symbol with initializer after NODE. */
inline symtab_node *next_defined_symbol (void);
/* Return the initialization priority. */
priority_type get_init_priority ();
- /* Return availability of NODE. */
- enum availability get_availability (void);
+ /* Return availability of NODE when referenced from REF. */
+ enum availability get_availability (symtab_node *ref = NULL);
+
+ /* Return true if NODE binds to the current definition in the final
+ executable when referenced from REF. If REF is NULL, return a
+ conservative value for any reference. */
+ bool binds_to_current_def_p (symtab_node *ref = NULL);
/* Make DECL local. */
void make_decl_local (void);
or abstract function kept for debug info purposes only. */
bool real_symbol_p (void);
+ /* Return true when the symbol needs to be output to the LTO symbol table. */
+ bool output_to_lto_symbol_table_p (void);
+
/* Determine if symbol declaration is needed. That is, visible to something
either outside this translation unit, something magic in the system
configury. This function is used just during symbol creation. */
bool needed_p (void);
+ /* Return true if this symbol is a function from the C frontend specified
+ directly in RTL form (with "__RTL"). */
+ bool native_rtl_p () const;
+
/* Return true when there are references to the node. */
bool referred_to_p (bool include_self = true);
Return NULL if there's no such node. */
static symtab_node *get_for_asmname (const_tree asmname);
- /* Dump symbol table to F. */
- static void dump_table (FILE *);
-
- /* Dump symbol table to stderr. */
- static inline DEBUG_FUNCTION void debug_symtab (void)
- {
- dump_table (stderr);
- }
-
/* Verify symbol table for internal consistency. */
static DEBUG_FUNCTION void verify_symtab_nodes (void);
/* Set when symbol can be streamed into bytecode for offloading. */
unsigned offloadable : 1;
+ /* Set when symbol is an IFUNC resolver. */
+ unsigned ifunc_resolver : 1;
+
/* Ordering of all symtab entries. */
int order;
static bool noninterposable_alias (symtab_node *node, void *data);
/* Worker for ultimate_alias_target. */
- symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL);
+ symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
+ symtab_node *ref = NULL);
+
+ /* Get dump name with normal or assembly name. */
+ const char *get_dump_name (bool asm_name_p) const;
};
inline void
extern const char * const ld_plugin_symbol_resolution_names[];
extern const char * const tls_model_names[];
-/* Information about thunk, used only for same body aliases. */
+/* Sub-structure of cgraph_node. Holds information about thunk, used only for
+ same body aliases.
+
+ Thunks are basically wrappers around methods which are introduced in case
+ of multiple inheritance in order to adjust the value of the "this" pointer
+ or of the returned value.
+
+ In the case of this-adjusting thunks, each back-end can override the
+ can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
+ (with a tail call for instance) directly as assembly. For the default hook
+ or for the case where the can_output_mi_thunk hooks return false, the thunk
+ is gimplified and lowered using the regular machinery. */
struct GTY(()) cgraph_thunk_info {
- /* Information about the thunk. */
+ /* Offset used to adjust "this". */
HOST_WIDE_INT fixed_offset;
+
+ /* Offset in the virtual table to get the offset to adjust "this". Valid iff
+ VIRTUAL_OFFSET_P is true. */
HOST_WIDE_INT virtual_value;
+
+ /* Thunk target, i.e. the method that this thunk wraps. Depending on the
+ TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */
tree alias;
+
+ /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
+ thunk. */
bool this_adjusting;
+
+ /* If true, this thunk is what we call a virtual thunk. In this case:
+ * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
+ done, add to the result the offset found in the vtable at:
+ vptr + VIRTUAL_VALUE
+ * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
+ the virtual one. */
bool virtual_offset_p;
+
+ /* ??? True for special kind of thunks, seems related to instrumentation. */
bool add_pointer_bounds_args;
- /* Set to true when alias node is thunk. */
+
+ /* Set to true when alias node (the cgraph_node to which this struct belongs)
+ is a thunk. Access to any other fields is invalid if this is false. */
bool thunk_p;
};
/* Max hardware vector size in bits for floating point vectors. */
unsigned int vecsize_float;
+ /* Machine mode of the mask argument(s), if they are to be passed
+ as bitmasks in integer argument(s). VOIDmode if masks are passed
+ as vectors of characteristic type. */
+ machine_mode mask_mode;
+
/* The mangling character for a given vector size. This is used
to determine the ISA mangling bit as specified in the Intel
Vector ABI. */
otherwise false. */
unsigned int inbranch : 1;
- /* True if this is a Cilk Plus variant. */
- unsigned int cilk_elemental : 1;
-
/* Doubly linked list of SIMD clones. */
cgraph_node *prev_clone, *next_clone;
struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
public:
+ friend class symbol_table;
+
/* Remove the node from cgraph and all inline clones inlined into it.
Skip however removal of FORBIDDEN_NODE and return true if it needs to be
removed. This allows to call the function from outer loop walking clone
/* Walk the alias chain to return the function cgraph_node is alias of.
Walk through thunk, too.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
- cgraph_node *function_symbol (enum availability *avail = NULL);
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
+ cgraph_node *function_symbol (enum availability *avail = NULL,
+ struct symtab_node *ref = NULL);
/* Walk the alias chain to return the function cgraph_node is alias of.
Walk through non virtual thunks, too. Thus we return either a function
or a virtual thunk node.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
cgraph_node *function_or_virtual_thunk_symbol
- (enum availability *avail = NULL);
+ (enum availability *avail = NULL,
+ struct symtab_node *ref = NULL);
/* Create node representing clone of N executed COUNT times. Decrease
the execution counts from original node too.
If the new node is being inlined into another one, NEW_INLINED_TO should be
the outline function the new one is (even indirectly) inlined to.
All hooks will see this in node's global.inlined_to, when invoked.
- Can be NULL if the node is not inlined. */
- cgraph_node *create_clone (tree decl, gcov_type count, int freq,
+ Can be NULL if the node is not inlined. SUFFIX is a string that is
+ appended to the original name. */
+ cgraph_node *create_clone (tree decl, profile_count count,
bool update_original,
vec<cgraph_edge *> redirect_callers,
bool call_duplication_hook,
cgraph_node *new_inlined_to,
- bitmap args_to_skip);
+ bitmap args_to_skip, const char *suffix = NULL);
/* Create callgraph node clone with new declaration. The actual body will
be copied later at compilation stage. */
If non-NULL BLOCK_TO_COPY determine what basic blocks
was copied to prevent duplications of calls that are dead
- in the clone. */
+ in the clone.
+
+ SUFFIX is a string that is appended to the original name. */
cgraph_node *create_version_clone (tree new_decl,
vec<cgraph_edge *> redirect_callers,
- bitmap bbs_to_copy);
+ bitmap bbs_to_copy,
+ const char *suffix = NULL);
/* Perform function versioning.
Function versioning includes copying of the tree and
/* Add thunk alias into callgraph. The alias declaration is ALIAS and it
aliases DECL with an adjustments made into the first parameter.
- See comments in thunk_adjust for detail on the parameters. */
+ See comments in struct cgraph_thunk_info for detail on the parameters. */
cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
HOST_WIDE_INT fixed_offset,
HOST_WIDE_INT virtual_value,
/* Given function symbol, walk the alias chain to return the function node
is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
- cgraph_node *ultimate_alias_target (availability *availability = NULL);
+ cgraph_node *ultimate_alias_target (availability *availability = NULL,
+ symtab_node *ref = NULL);
/* Expand thunk NODE to gimple if possible.
When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
/* Create edge from a given function to CALLEE in the cgraph. */
cgraph_edge *create_edge (cgraph_node *callee,
- gcall *call_stmt, gcov_type count,
- int freq);
+ gcall *call_stmt, profile_count count);
/* Create an indirect edge with a yet-undetermined callee where the call
statement destination is a formal parameter of the caller with index
PARAM_INDEX. */
cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
- gcov_type count, int freq,
+ profile_count count,
bool compute_indirect_info = true);
/* Like cgraph_create_edge walk the clone tree and update all clones sharing
update the edge same way as cgraph_set_call_stmt_including_clones does. */
void create_edge_including_clones (cgraph_node *callee,
gimple *old_stmt, gcall *stmt,
- gcov_type count,
- int freq,
+ profile_count count,
cgraph_inline_failed_t reason);
/* Return the callgraph edge representing the GIMPLE_CALL statement
/* Return function availability. See cgraph.h for description of individual
return values. */
- enum availability get_availability (void);
+ enum availability get_availability (symtab_node *ref = NULL);
/* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
if any to NOTHROW. */
- void set_nothrow_flag (bool nothrow);
+ bool set_nothrow_flag (bool nothrow);
+
+ /* Set DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
+ if any. */
+ bool set_malloc_flag (bool malloc_p);
+
+ /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
+ If SET_CONST is false, clear the flag.
+
+ When setting the flag be careful about possible interposition and
+ do not set the flag for functions that can be interposed, and set the
+ pure flag for functions that can bind to another definition.
- /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
- if any to READONLY. */
- void set_const_flag (bool readonly, bool looping);
+ Return true if any change was done. */
+
+ bool set_const_flag (bool set_const, bool looping);
/* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
- if any to PURE. */
- void set_pure_flag (bool pure, bool looping);
+ if any to PURE.
+
+ When setting the flag, be careful about possible interposition.
+ Return true if any change was done. */
+
+ bool set_pure_flag (bool pure, bool looping);
/* Call callback on function and aliases associated to the function.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
dump_cgraph (stderr);
}
+ /* Get unique identifier of the node. */
+ inline int get_uid ()
+ {
+ return m_uid;
+ }
+
/* Record that DECL1 and DECL2 are semantically identical function
versions. */
static void record_function_versions (tree decl1, tree decl2);
/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
DECL is a duplicate declaration. */
- static void delete_function_version (tree decl);
+ static void delete_function_version_by_decl (tree decl);
/* Add the function FNDECL to the call graph.
Unlike finalize_function, this function is intended to be used
cgraph_node *prev_sibling_clone;
cgraph_node *clones;
cgraph_node *clone_of;
- /* If instrumentation_clone is 1 then instrumented_version points
- to the original function used to make instrumented version.
- Otherwise points to instrumented version of the function. */
- cgraph_node *instrumented_version;
- /* If instrumentation_clone is 1 then orig_decl is the original
- function declaration. */
- tree orig_decl;
/* For functions with many calls sites it holds map from call expression
to the edge to speed up cgraph_edge function. */
hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
cgraph_thunk_info thunk;
/* Expected number of executions: calculated in profile.c. */
- gcov_type count;
+ profile_count count;
/* How to scale counts at materialization time; used to merge
LTO units with different number of profile runs. */
int count_materialization_scale;
- /* Unique id of the node. */
- int uid;
- /* Summary unique id of the node. */
- int summary_uid;
/* ID assigned by the profiling. */
unsigned int profile_id;
/* Time profiler: first run of function. */
/* True if this decl is a dispatcher for function versions. */
unsigned dispatcher_function : 1;
/* True if this decl calls a COMDAT-local function. This is set up in
- compute_inline_parameters and inline_call. */
+ compute_fn_summary and inline_call. */
unsigned calls_comdat_local : 1;
/* True if node has been created by merge operation in IPA-ICF. */
unsigned icf_merged: 1;
- /* True when function is clone created for Pointer Bounds Checker
- instrumentation. */
- unsigned instrumentation_clone : 1;
/* True if call to node can't result in a call to free, munmap or
other operation that could make previously non-trapping memory
accesses trapping. */
unsigned indirect_call_target : 1;
private:
+ /* Unique id of the node. */
+ int m_uid;
+
/* Worker for call_for_symbol_and_aliases. */
bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
void *),
Return true if resulting context is valid.
When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
- valid only via alocation of new polymorphic type inside by means
+ valid only via allocation of new polymorphic type inside by means
of placement new.
When CONSIDER_BASES is false, only look for actual fields, not base types
unsigned agg_contents : 1;
/* Set when this is a call through a member pointer. */
unsigned member_ptr : 1;
- /* When the previous bit is set, this one determines whether the destination
- is loaded from a parameter passed by reference. */
+ /* When the agg_contents bit is set, this one determines whether the
+ destination is loaded from a parameter passed by reference. */
unsigned by_ref : 1;
+ /* When the agg_contents bit is set, this one determines whether we can
+ deduce from the function body that the loaded value from the reference is
+ never modified between the invocation of the function and the load
+ point. */
+ unsigned guaranteed_unmodified : 1;
/* For polymorphic calls this specify whether the virtual table pointer
may have changed in between function entry and the call. */
unsigned vptr_changed : 1;
struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
for_user)) cgraph_edge {
friend class cgraph_node;
+ friend class symbol_table;
/* Remove the edge in the cgraph. */
void remove (void);
/* Turn edge into speculative call calling N2. Update
the profile so the direct call is taken DIRECT_COUNT times
out of the edge's original count. */
- cgraph_edge *make_speculative (cgraph_node *n2, gcov_type direct_count,
- int direct_frequency);
+ cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);
/* Given speculative call edge, return all three components. */
void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
/* Create clone of edge in the node N represented
by CALL_EXPR the callgraph. */
cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
- gcov_type count_scale, int freq_scale, bool update_original);
+ profile_count num, profile_count den,
+ bool update_original);
/* Verify edge count and frequency. */
- bool verify_count_and_frequency ();
+ bool verify_count ();
/* Return true when call of edge cannot lead to return from caller
and thus it is safe to ignore its side effects for IPA analysis
/* Return true if the call can be hot. */
bool maybe_hot_p (void);
+ /* Get unique identifier of the edge. */
+ inline int get_uid ()
+ {
+ return m_uid;
+ }
+
/* Rebuild cgraph edges for current function node. This needs to be run after
passes that don't update the cgraph. */
static unsigned int rebuild_edges (void);
static void rebuild_references (void);
/* Expected number of executions: calculated in profile.c. */
- gcov_type count;
+ profile_count count;
cgraph_node *caller;
cgraph_node *callee;
cgraph_edge *prev_caller;
/* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
when the function is serialized in. */
unsigned int lto_stmt_uid;
- /* Expected frequency of executions within the function.
- When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
- per function call. The range is 0 to CGRAPH_FREQ_MAX. */
- int frequency;
- /* Unique id of the edge. */
- int uid;
/* Whether this edge was made direct by indirect inlining. */
unsigned int indirect_inlining_edge : 1;
/* Whether this edge describes an indirect call with an undetermined
type. */
unsigned in_polymorphic_cdtor : 1;
+ /* Return true if call must bind to current definition. */
+ bool binds_to_current_def_p ();
+
+ /* Expected frequency of executions within the function.
+ When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
+ per function call. The range is 0 to CGRAPH_FREQ_MAX. */
+ int frequency ();
+
+ /* Expected frequency of executions within the function. */
+ sreal sreal_frequency ();
private:
+ /* Unique id of the edge. */
+ int m_uid;
+
/* Remove the edge from the list of the callers of the callee. */
void remove_caller (void);
void analyze (void);
/* Return variable availability. */
- availability get_availability (void);
+ availability get_availability (symtab_node *ref = NULL);
/* When doing LTO, read variable's constructor from disk if
it is not already present. */
/* For given variable pool node, walk the alias chain to return the function
the variable is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline varpool_node *ultimate_alias_target
- (availability *availability = NULL);
+ (availability *availability = NULL, symtab_node *ref = NULL);
/* Return node that alias is aliasing. */
inline varpool_node *get_alias_target (void);
friend class cgraph_node;
friend class cgraph_edge;
- symbol_table (): cgraph_max_summary_uid (1)
+ symbol_table (): cgraph_max_uid (1), edges_max_uid (1)
{
}
/* Allocate new callgraph node and insert it into basic data structures. */
cgraph_node *create_empty (void);
- /* Release a callgraph NODE with UID and put in to the list
- of free nodes. */
- void release_symbol (cgraph_node *node, int uid);
+ /* Release a callgraph NODE. */
+ void release_symbol (cgraph_node *node);
/* Output all variables enqueued to be assembled. */
bool output_variables (void);
/* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
void change_decl_assembler_name (tree decl, tree name);
+ /* Dump symbol table to F. */
+ void dump (FILE *f);
+
+ /* Dump symbol table to stderr. */
+ void DEBUG_FUNCTION debug (void);
+
/* Return true if assembler names NAME1 and NAME2 leads to the same symbol
name. */
static bool assembler_names_equal_p (const char *name1, const char *name2);
int cgraph_count;
int cgraph_max_uid;
- int cgraph_max_summary_uid;
int edges_count;
int edges_max_uid;
/* Return symbol used to separate symbol name from suffix. */
static char symbol_suffix_separator ();
+ FILE* GTY ((skip)) ipa_clones_dump_file;
+
+ hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
+
private:
/* Allocate new callgraph node. */
inline cgraph_node * allocate_cgraph_symbol (void);
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
- gcall *call_stmt, gcov_type count, int freq,
+ gcall *call_stmt, profile_count count,
bool indir_unknown_callee);
/* Put the edge onto the free list. */
/* Initialize datastructures so DECL is a function in lowered gimple form.
IN_SSA is true if the gimple is in SSA. */
-basic_block init_lowered_empty_function (tree, bool, gcov_type);
+basic_block init_lowered_empty_function (tree, bool, profile_count);
+tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
/* In cgraphclones.c */
tree clone_function_name_1 (const char *, const char *);
void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
bool, bitmap, bool, bitmap, basic_block);
+void dump_callgraph_transformation (const cgraph_node *original,
+ const cgraph_node *clone,
+ const char *suffix);
+tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
+ bool skip_return);
+
/* In cgraphbuild.c */
int compute_call_stmt_bb_frequency (tree, basic_block bb);
void record_references_in_initializer (tree, bool);
/* In varpool.c */
tree ctor_for_folding (tree);
-/* In tree-chkp.c */
-extern bool chkp_function_instrumented_p (tree fndecl);
+/* In ipa-inline-analysis.c */
+void initialize_inline_failed (struct cgraph_edge *);
+bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
/* Return true when the symbol is real symbol, i.e. it is not inline clone
or abstract function kept for debug info purposes only. */
{
ipa_ref *ref = NULL;
iterate_reference (0, ref);
- if (ref->use == IPA_REF_CHKP)
- iterate_reference (1, ref);
gcc_checking_assert (ref->use == IPA_REF_ALIAS);
return ref->referred;
}
/* Release a callgraph NODE with UID and put in to the list of free nodes. */
inline void
-symbol_table::release_symbol (cgraph_node *node, int uid)
+symbol_table::release_symbol (cgraph_node *node)
{
cgraph_count--;
list. */
memset (node, 0, sizeof (*node));
node->type = SYMTAB_FUNCTION;
- node->uid = uid;
SET_NEXT_FREE_NODE (node, free_nodes);
free_nodes = node;
}
free_nodes = NEXT_FREE_NODE (node);
}
else
- {
- node = ggc_cleared_alloc<cgraph_node> ();
- node->uid = cgraph_max_uid++;
- }
+ node = ggc_cleared_alloc<cgraph_node> ();
- node->summary_uid = cgraph_max_summary_uid++;
+ node->m_uid = cgraph_max_uid++;
return node;
}
{
gcc_assert (!global.inlined_to);
return (!force_output && !address_taken
+ && !ifunc_resolver
&& !used_from_other_partition
&& !DECL_VIRTUAL_P (decl)
&& !DECL_STATIC_CONSTRUCTOR (decl)
cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
{
gcc_checking_assert (!global.inlined_to);
- /* Instrumentation clones should not be removed before
- instrumentation happens. New callers may appear after
- instrumentation. */
- if (instrumentation_clone
- && !chkp_function_instrumented_p (decl))
- return false;
/* Extern inlines can always go, we will use the external definition. */
if (DECL_EXTERNAL (decl))
return true;
/* Walk the alias chain to return the symbol NODE is alias of.
If NODE is not an alias, return NODE.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline symtab_node *
-symtab_node::ultimate_alias_target (enum availability *availability)
+symtab_node::ultimate_alias_target (enum availability *availability,
+ symtab_node *ref)
{
if (!alias)
{
if (availability)
- *availability = get_availability ();
+ *availability = get_availability (ref);
return this;
}
- return ultimate_alias_target_1 (availability);
+ return ultimate_alias_target_1 (availability, ref);
}
/* Given function symbol, walk the alias chain to return the function node
is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline cgraph_node *
-cgraph_node::ultimate_alias_target (enum availability *availability)
+cgraph_node::ultimate_alias_target (enum availability *availability,
+ symtab_node *ref)
{
cgraph_node *n = dyn_cast <cgraph_node *>
- (symtab_node::ultimate_alias_target (availability));
+ (symtab_node::ultimate_alias_target (availability, ref));
if (!n && availability)
*availability = AVAIL_NOT_AVAILABLE;
return n;
/* For given variable pool node, walk the alias chain to return the function
the variable is alias of. Do not walk through thunks.
- When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ When AVAILABILITY is non-NULL, get minimal availability in the chain.
+ When REF is non-NULL, assume that reference happens in symbol REF
+ when determining the availability. */
inline varpool_node *
-varpool_node::ultimate_alias_target (availability *availability)
+varpool_node::ultimate_alias_target (availability *availability,
+ symtab_node *ref)
{
varpool_node *n = dyn_cast <varpool_node *>
- (symtab_node::ultimate_alias_target (availability));
+ (symtab_node::ultimate_alias_target (availability, ref));
if (!n && availability)
*availability = AVAIL_NOT_AVAILABLE;
callee->callers = next_caller;
}
+/* Return true if call must bind to current definition. */
+
+inline bool
+cgraph_edge::binds_to_current_def_p ()
+{
+ if (callee)
+ return callee->binds_to_current_def_p (caller);
+ else
+ return false;
+}
+
+/* Expected frequency of executions within the function.
+ When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
+ per function call. The range is 0 to CGRAPH_FREQ_MAX. */
+
+inline int
+cgraph_edge::frequency ()
+{
+ return count.to_cgraph_frequency (caller->global.inlined_to
+ ? caller->global.inlined_to->count
+ : caller->count);
+}
+
+
/* Return true if the TM_CLONE bit is set for a given FNDECL. */
static inline bool
decl_is_tm_clone (const_tree fndecl)
return cgraph_node::get_create (node);
}
-/* Return availability of NODE. */
+/* Return availability of NODE when referenced from REF. */
inline enum availability
-symtab_node::get_availability (void)
+symtab_node::get_availability (symtab_node *ref)
{
if (is_a <cgraph_node *> (this))
- return dyn_cast <cgraph_node *> (this)->get_availability ();
+ return dyn_cast <cgraph_node *> (this)->get_availability (ref);
else
- return dyn_cast <varpool_node *> (this)->get_availability ();
+ return dyn_cast <varpool_node *> (this)->get_availability (ref);
}
/* Call callback on symtab node and aliases associated to this node.
- When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
- skipped. */
+ When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
inline bool
symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
void *data,
bool include_overwritable)
{
- if (callback (this, data))
- return true;
+ if (include_overwritable
+ || get_availability () > AVAIL_INTERPOSABLE)
+ {
+ if (callback (this, data))
+ return true;
+ }
if (has_aliases_p ())
return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
return false;
}
/* Call callback on function and aliases associated to the function.
- When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ When INCLUDE_OVERWRITABLE is false, overwritable symbols are
skipped. */
inline bool
void *data,
bool include_overwritable)
{
- if (callback (this, data))
- return true;
+ if (include_overwritable
+ || get_availability () > AVAIL_INTERPOSABLE)
+ {
+ if (callback (this, data))
+ return true;
+ }
if (has_aliases_p ())
return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
return false;
}
/* Call callback on varpool symbol and aliases associated to varpool symbol.
- When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ When INCLUDE_OVERWRITABLE is false, overwritable symbols are
skipped. */
inline bool
void *data,
bool include_overwritable)
{
- if (callback (this, data))
- return true;
+ if (include_overwritable
+ || get_availability () > AVAIL_INTERPOSABLE)
+ {
+ if (callback (this, data))
+ return true;
+ }
if (has_aliases_p ())
return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
return false;
return (!outer_type && !speculative_outer_type);
}
-/* Return true if NODE is local. Instrumentation clones are counted as local
- only when original function is local. */
-
-static inline bool
-cgraph_local_p (cgraph_node *node)
-{
- if (!node->instrumentation_clone || !node->instrumented_version)
- return node->local.local;
-
- return node->local.local && node->instrumented_version->local.local;
-}
-
/* When using fprintf (or similar), problems can arise with
transient generated strings. Many string-generation APIs
only support one result being alive at once (e.g. by