* cgraph.c (dump_cgraph_node): Update.
* cgraph.h (cgraph_local_info): Break out inline summary.
* cgraphunit.c (cgraph_process_new_functions): Use inliner analysis
hook.
	* ipa-inline.c (inline_summary): New accessor function.
(cgraph_clone_inlined_nodes, cgraph_check_inline_limits,
cgraph_decide_inlining, compute_inline_parameters): Update.
* ipa.c (cgraph_remove_unreachable_nodes): Remove statistics.
From-SVN: r135037
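
For readers who only skim the ChangeLog, the shape of the refactoring is roughly as follows. This is a minimal, self-contained sketch assuming only the two fields the patch touches; the *_sketch names are illustrative placeholders, not GCC identifiers (the real declarations appear in the cgraph.h and ipa-inline.c hunks below).

/* Sketch only: the real cgraph structures carry many more members.  */

struct inline_summary_sketch
{
  /* Estimated stack frame consumption by the function.  */
  long long estimated_self_stack_size;

  /* Size of the function before inlining.  */
  int self_insns;
};

struct cgraph_local_info_sketch
{
  /* The per-function inlining data now lives in one nested struct.  */
  struct inline_summary_sketch inline_summary;
};

struct cgraph_node_sketch
{
  struct cgraph_local_info_sketch local;
};

/* Mirrors the new inline_summary () accessor in ipa-inline.c: the inliner
   reads the summary through this function instead of reaching into
   node->local directly.  */
static inline struct inline_summary_sketch *
inline_summary_sketch (struct cgraph_node_sketch *node)
{
  return &node->local.inline_summary;
}

Call sites in the inliner then read, for example, inline_summary_sketch (node)->self_insns rather than node->local.self_insns.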
+2008-05-07 Jan Hubicka <jh@suse.cz>
+
+ * cgraph.c (dump_cgraph_node): Update.
+ * cgraph.h (cgraph_local_info): Break out inline summary.
+ * cgraphunit.c (cgraph_process_new_functions): Use inliner analysis
+ hook.
+	* ipa-inline.c (inline_summary): New accessor function.
+ (cgraph_clone_inlined_nodes, cgraph_check_inline_limits,
+ cgraph_decide_inlining, compute_inline_parameters): Update.
+ * ipa.c (cgraph_remove_unreachable_nodes): Remove statistics.
+
2008-05-07 Maxim Kuvyrkov <maxim@codesourcery.com>
	Clean up ColdFire scheduling support and add V4 pipeline model.
if (node->count)
fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
(HOST_WIDEST_INT)node->count);
- if (node->local.self_insns)
- fprintf (f, " %i insns", node->local.self_insns);
- if (node->global.insns && node->global.insns != node->local.self_insns)
+ if (node->local.inline_summary.self_insns)
+ fprintf (f, " %i insns", node->local.inline_summary.self_insns);
+ if (node->global.insns && node->global.insns
+ != node->local.inline_summary.self_insns)
fprintf (f, " (%i after inlining)", node->global.insns);
- if (node->local.estimated_self_stack_size)
- fprintf (f, " %i bytes stack usage", (int)node->local.estimated_self_stack_size);
- if (node->global.estimated_stack_size != node->local.estimated_self_stack_size)
+  if (node->local.inline_summary.estimated_self_stack_size)
+    fprintf (f, " %i bytes stack usage",
+	     (int) node->local.inline_summary.estimated_self_stack_size);
+  if (node->global.estimated_stack_size
+      != node->local.inline_summary.estimated_self_stack_size)
fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
if (node->origin)
fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
struct cgraph_local_info GTY(())
{
- /* Estimated stack frame consumption by the function. */
- HOST_WIDE_INT estimated_self_stack_size;
+ struct inline_summary {
+ /* Estimated stack frame consumption by the function. */
+ HOST_WIDE_INT estimated_self_stack_size;
- /* Size of the function before inlining. */
- int self_insns;
+ /* Size of the function before inlining. */
+ int self_insns;
+ } inline_summary;
  /* Set when function is visible in current compilation unit only
and its address is never taken. */
cgraph_analyze_function (node);
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
current_function_decl = fndecl;
- node->local.inlinable = tree_inlinable_function_p (fndecl);
- node->local.self_insns = estimate_num_insns (fndecl,
- &eni_inlining_weights);
- node->local.disregard_inline_limits
- |= DECL_DISREGARD_INLINE_LIMITS (fndecl);
- /* Inlining characteristics are maintained by the
- cgraph_mark_inline. */
- node->global.insns = node->local.self_insns;
- if (flag_really_no_inline && !node->local.disregard_inline_limits)
- node->local.inlinable = 0;
+ pass_ipa_inline.function_generate_summary (node);
if ((cgraph_state == CGRAPH_STATE_IPA_SSA
&& !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
/* When not optimizing, be sure we run early local passes anyway
static int overall_insns;
static gcov_type max_count;
+/* Return a pointer to the inline summary of NODE.  */
+static inline struct inline_summary *
+inline_summary (struct cgraph_node *node)
+{
+ return &node->local.inline_summary;
+}
+
/* Estimate size of the function after inlining WHAT into TO. */
static int
else
e->callee->global.inlined_to = e->caller;
e->callee->global.stack_frame_offset
- = e->caller->global.stack_frame_offset + e->caller->local.estimated_self_stack_size;
- peak = e->callee->global.stack_frame_offset + e->callee->local.estimated_self_stack_size;
+ = e->caller->global.stack_frame_offset
+ + inline_summary (e->caller)->estimated_self_stack_size;
+ peak = e->callee->global.stack_frame_offset
+ + inline_summary (e->callee)->estimated_self_stack_size;
if (e->callee->global.inlined_to->global.estimated_stack_size < peak)
e->callee->global.inlined_to->global.estimated_stack_size = peak;
/* When inlining large function body called once into small function,
take the inlined function as base for limiting the growth. */
- if (to->local.self_insns > what->local.self_insns)
- limit = to->local.self_insns;
+  if (inline_summary (to)->self_insns > inline_summary (what)->self_insns)
+ limit = inline_summary (to)->self_insns;
else
- limit = what->local.self_insns;
+ limit = inline_summary (what)->self_insns;
limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;
return false;
}
- stack_size_limit = to->local.estimated_self_stack_size;
+ stack_size_limit = inline_summary (to)->estimated_self_stack_size;
stack_size_limit += stack_size_limit * PARAM_VALUE (PARAM_STACK_FRAME_GROWTH) / 100;
inlined_stack = (to->global.stack_frame_offset
- + to->local.estimated_self_stack_size
+ + inline_summary (to)->estimated_self_stack_size
+ what->global.estimated_stack_size);
if (inlined_stack > stack_size_limit
&& inlined_stack > PARAM_VALUE (PARAM_LARGE_STACK_FRAME))
{
struct cgraph_edge *e;
- initial_insns += node->local.self_insns;
- gcc_assert (node->local.self_insns == node->global.insns);
+ initial_insns += inline_summary (node)->self_insns;
+ gcc_assert (inline_summary (node)->self_insns == node->global.insns);
for (e = node->callees; e; e = e->next_callee)
if (max_count < e->count)
max_count = e->count;
struct cgraph_node *node = cgraph_node (current_function_decl);
gcc_assert (!node->global.inlined_to);
- node->local.estimated_self_stack_size = estimated_stack_frame_size ();
- node->global.estimated_stack_size = node->local.estimated_self_stack_size;
+ inline_summary (node)->estimated_self_stack_size
+ = estimated_stack_frame_size ();
+ node->global.estimated_stack_size
+ = inline_summary (node)->estimated_self_stack_size;
node->global.stack_frame_offset = 0;
node->local.inlinable = tree_inlinable_function_p (current_function_decl);
- node->local.self_insns = estimate_num_insns (current_function_decl,
- &eni_inlining_weights);
+  inline_summary (node)->self_insns
+    = estimate_num_insns (current_function_decl, &eni_inlining_weights);
if (node->local.inlinable && !node->local.disregard_inline_limits)
node->local.disregard_inline_limits
= DECL_DISREGARD_INLINE_LIMITS (current_function_decl);
if (flag_really_no_inline && !node->local.disregard_inline_limits)
node->local.inlinable = 0;
/* Inlining characteristics are maintained by the cgraph_mark_inline. */
- node->global.insns = node->local.self_insns;
+ node->global.insns = inline_summary (node)->self_insns;
return 0;
}
struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
struct cgraph_node *node, *next;
bool changed = false;
- int insns = 0;
#ifdef ENABLE_CHECKING
verify_cgraph ();
next = node->next;
if (!node->aux)
{
- int local_insns;
- tree decl = node->decl;
-
node->global.inlined_to = NULL;
- if (DECL_STRUCT_FUNCTION (decl))
- local_insns = node->local.self_insns;
- else
- local_insns = 0;
if (file)
fprintf (file, " %s", cgraph_node_name (node));
if (!node->analyzed || !DECL_EXTERNAL (node->decl)
else
cgraph_remove_node (node);
}
- if (!DECL_SAVED_TREE (decl))
- insns += local_insns;
changed = true;
}
}
for (node = cgraph_nodes; node; node = node->next)
node->aux = NULL;
- if (file)
- fprintf (file, "\nReclaimed %i insns", insns);
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
/* IPA Passes */
extern struct ipa_opt_pass pass_ipa_inline;
+extern struct simple_ipa_opt_pass pass_ipa_reference;
extern struct simple_ipa_opt_pass pass_ipa_matrix_reorg;
extern struct simple_ipa_opt_pass pass_ipa_cp;
extern struct simple_ipa_opt_pass pass_ipa_early_inline;
-extern struct simple_ipa_opt_pass pass_ipa_reference;
extern struct simple_ipa_opt_pass pass_ipa_pure_const;
extern struct simple_ipa_opt_pass pass_ipa_type_escape;
extern struct simple_ipa_opt_pass pass_ipa_pta;