/* By inlining function having uninitialized variable, we might
extend the lifetime (variable might get reused). This cause
ICE in the case we end up extending lifetime of SSA name across
- abnormal edge, but also increase register presure.
+ abnormal edge, but also increase register pressure.
We simply initialize all uninitialized vars by 0 except for case
we are inlining to very first BB. We can avoid this for all
{
value = *n;
STRIP_TYPE_NOPS (value);
- if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
+ if (TREE_CONSTANT (value) || TREE_READONLY (value))
{
*tp = build_empty_stmt ();
return copy_body_r (tp, walk_subtrees, data);
{
*tp = build1 (INDIRECT_REF, type, new);
TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+ TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
}
}
*walk_subtrees = 0;
later */
static basic_block
-copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
+copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
+ gcov_type count_scale)
{
block_stmt_iterator bsi, copy_bsi;
basic_block copy_basic_block;
pointer_set_insert (id->statements_to_fold, stmt);
/* We're duplicating a CALL_EXPR. Find any corresponding
callgraph edges and update or duplicate them. */
- if (call && (decl = get_callee_fndecl (call)))
+ if (call)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
edge = cgraph_edge (id->src_node, orig_stmt);
if (edge)
cgraph_clone_edge (edge, id->dst_node, stmt,
- REG_BR_PROB_BASE, 1, edge->frequency, true);
+ REG_BR_PROB_BASE, 1,
+ edge->frequency, true);
break;
case CB_CGE_MOVE_CLONES:
node = node->next_clone)
{
edge = cgraph_edge (node, orig_stmt);
- gcc_assert (edge);
- cgraph_set_call_stmt (edge, stmt);
+ if (edge)
+ cgraph_set_call_stmt (edge, stmt);
}
/* FALLTHRU */
accordingly. Edges will be taken care of later. Assume aux
pointers to point to the copies of each BB. */
static void
-copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
+copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
{
basic_block new_bb = (basic_block) bb->aux;
edge_iterator ei;
static void
copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
- basic_block new_bb = bb->aux;
+ basic_block const new_bb = (basic_block) bb->aux;
edge_iterator ei;
tree phi;
= new_phi = create_phi_node (new_res, new_bb);
FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
{
- edge old_edge = find_edge (new_edge->src->aux, bb);
+ edge const old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
tree new_arg = arg;
struct function *new_cfun
= (struct function *) ggc_alloc_cleared (sizeof (struct function));
struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
- int count_scale, frequency_scale;
+ gcov_type count_scale, frequency_scale;
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
count_scale = (REG_BR_PROB_BASE * count
if (src_cfun->gimple_df)
{
- init_tree_ssa ();
+ init_tree_ssa (cfun);
cfun->gimple_df->in_ssa_p = true;
init_ssa_operands ();
}
struct function *cfun_to_copy;
basic_block bb;
tree new_fndecl = NULL;
- int count_scale, frequency_scale;
+ gcov_type count_scale, frequency_scale;
int last;
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
{
tree init_stmt;
tree var;
- tree var_sub;
tree rhs = value;
tree def = (gimple_in_ssa_p (cfun)
? gimple_default_def (id->src_cfun, p) : NULL);
add_referenced_var (var);
}
- /* See if the frontend wants to pass this by invisible reference. If
- so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
- replace uses of the PARM_DECL with dereferences. */
- if (TREE_TYPE (var) != TREE_TYPE (p)
- && POINTER_TYPE_P (TREE_TYPE (var))
- && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
- {
- insert_decl_map (id, var, var);
- var_sub = build_fold_indirect_ref (var);
- }
- else
- var_sub = var;
-
/* Register the VAR_DECL as the equivalent for the PARM_DECL;
that way, when the PARM_DECL is encountered, it will be
automatically replaced by the VAR_DECL. */
- insert_decl_map (id, p, var_sub);
+ insert_decl_map (id, p, var);
/* Declare this new variable. */
TREE_CHAIN (var) = *vars;
if (rhs == error_mark_node)
{
- insert_decl_map (id, p, var_sub);
+ insert_decl_map (id, p, var);
return;
}
|| !is_gimple_reg (var))
{
tree_stmt_iterator i;
+ struct gimplify_ctx gctx;
- push_gimplify_context ();
+ push_gimplify_context (&gctx);
gimplify_stmt (&init_stmt);
if (gimple_in_ssa_p (cfun)
&& init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
}
/* If VAR represents a zero-sized variable, it's possible that the
- assignment statment may result in no gimple statements. */
+ assignment statement may result in no gimple statements. */
if (init_stmt)
bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
if (gimple_in_ssa_p (cfun))
static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
- struct eni_data *d = data;
+ struct eni_data *const d = (struct eni_data *) data;
tree x = *tp;
unsigned cost;
case BIND_EXPR:
case WITH_CLEANUP_EXPR:
case PAREN_EXPR:
- case NOP_EXPR:
- case CONVERT_EXPR:
+ CASE_CONVERT:
case VIEW_CONVERT_EXPR:
case SAVE_EXPR:
case ADDR_EXPR:
case EH_FILTER_EXPR:
case STATEMENT_LIST:
case ERROR_MARK:
- case NON_LVALUE_EXPR:
case FDESC_EXPR:
case VA_ARG_EXPR:
case TRY_CATCH_EXPR:
}
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_FOR:
case OMP_SECTIONS:
case OMP_SINGLE:
BLOCK_SUPERCONTEXT (new_block) = current_block;
}
+/* Fetch callee declaration from the call graph edge going from NODE and
+ associated with STMT call statement. Return NULL_TREE if not found. */
+static tree
+get_indirect_callee_fndecl (struct cgraph_node *node, tree stmt)
+{
+ struct cgraph_edge *cs;
+
+ cs = cgraph_edge (node, stmt);
+ if (cs)
+ return cs->callee->decl;
+
+ return NULL_TREE;
+}
+
/* If *TP is a CALL_EXPR, replace it with its inline expansion. */
static bool
If we cannot, then there is no hope of inlining the function. */
fn = get_callee_fndecl (t);
if (!fn)
- goto egress;
+ {
+ fn = get_indirect_callee_fndecl (id->dst_node, stmt);
+ if (!fn)
+ goto egress;
+ }
/* Turn forward declarations into real ones. */
fn = cgraph_node (fn)->decl;
inlining. */
if (!cgraph_inline_p (cg_edge, &reason))
{
+ /* If this call was originally indirect, we do not want to emit any
+ inlining related warnings or sorry messages because there are no
+ guarantees regarding those. */
+ if (cg_edge->indirect_call)
+ goto egress;
+
if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
/* Avoid warnings during early inline pass. */
&& (!flag_unit_at_a_time || cgraph_global_info_ready))
tree prev_fn;
basic_block bb;
int last = n_basic_blocks;
+ struct gimplify_ctx gctx;
+
/* There is no point in performing inlining if errors have already
occurred -- and we might crash if we try to inline invalid
code. */
id.transform_lang_insert_block = NULL;
id.statements_to_fold = pointer_set_create ();
- push_gimplify_context ();
+ push_gimplify_context (&gctx);
/* We make no attempts to keep dominance info up-to-date. */
free_dominance_info (CDI_DOMINATORS);
if (tree_map)
for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
{
- replace_info = VARRAY_GENERIC_PTR (tree_map, i);
+ replace_info = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i);
if (replace_info->replace_p)
insert_decl_map (&id, replace_info->old_tree,
replace_info->new_tree);
return type;
}
+
+/* Return whether it is safe to inline a function, given that the callee
+ may use different target specific options or different optimization
+ options than its caller. */
+bool
+tree_can_inline_p (tree caller, tree callee)
+{
+ /* Don't inline a function with a higher optimization level than the
+ caller, or with different space constraints (hot/cold functions). */
+ tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
+ tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
+
+ if (caller_tree != callee_tree)
+ {
+ struct cl_optimization *caller_opt
+ = TREE_OPTIMIZATION ((caller_tree)
+ ? caller_tree
+ : optimization_default_node);
+
+ struct cl_optimization *callee_opt
+ = TREE_OPTIMIZATION ((callee_tree)
+ ? callee_tree
+ : optimization_default_node);
+
+ if ((caller_opt->optimize > callee_opt->optimize)
+ || (caller_opt->optimize_size != callee_opt->optimize_size))
+ return false;
+ }
+
+ /* Allow the backend to decide if inlining is ok. */
+ return targetm.target_option.can_inline_p (caller, callee);
+}