/* Tree inlining.
- Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
This file is part of GCC.
#include "value-prof.h"
#include "tree-pass.h"
#include "target.h"
-#include "integrate.h"
#include "rtl.h" /* FIXME: For asm_str_count. */
if (processing_debug_stmt)
{
+ if (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
+ && SSA_NAME_IS_DEFAULT_DEF (name)
+ && id->entry_bb == NULL
+ && single_succ_p (ENTRY_BLOCK_PTR))
+ {
+ tree vexpr = make_node (DEBUG_EXPR_DECL);
+ gimple def_temp;
+ gimple_stmt_iterator gsi;
+ tree val = SSA_NAME_VAR (name);
+
+ n = (tree *) pointer_map_contains (id->decl_map, val);
+ if (n != NULL)
+ val = *n;
+ if (TREE_CODE (val) != PARM_DECL)
+ {
+ processing_debug_stmt = -1;
+ return name;
+ }
+ def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
+ DECL_ARTIFICIAL (vexpr) = 1;
+ TREE_TYPE (vexpr) = TREE_TYPE (name);
+ DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
+ gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+ gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
+ return vexpr;
+ }
+
processing_debug_stmt = -1;
return name;
}
walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
}
- if (cfun && gimple_in_ssa_p (cfun)
- && (TREE_CODE (t) == VAR_DECL
- || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
- {
- get_var_ann (t);
- add_referenced_var (t);
- }
+ if ((TREE_CODE (t) == VAR_DECL
+ || TREE_CODE (t) == RESULT_DECL
+ || TREE_CODE (t) == PARM_DECL)
+ && id->src_fn && DECL_STRUCT_FUNCTION (id->src_fn)
+ && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
+ /* We don't want to mark as referenced VAR_DECLs that were
+ not marked as such in the src function. */
+ && (TREE_CODE (decl) != VAR_DECL
+ || referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
+ DECL_UID (decl))))
+ add_referenced_var (t);
return t;
}
{
tree stmt = tsi_stmt (oi);
if (TREE_CODE (stmt) == STATEMENT_LIST)
+ /* This copy is not redundant; tsi_link_after will smash this
+ STATEMENT_LIST into the end of the one we're building, and we
+ don't want to do that with the original. */
copy_statement_list (&stmt);
tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
}
|| decl_function_context (*tp) == id->src_fn))
/* These may need to be remapped for EH handling. */
*tp = remap_decl (*tp, id);
+ else if (TREE_CODE (*tp) == FIELD_DECL)
+ {
+ /* If the enclosing record type is variably_modified_type_p, the field
+ has already been remapped. Otherwise, it need not be. */
+ tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
+ if (n)
+ *tp = *n;
+ *walk_subtrees = 0;
+ }
else if (TYPE_P (*tp))
/* Types may need remapping as well. */
*tp = remap_type (*tp, id);
{
/* Otherwise, just copy the node. Note that copy_tree_r already
knows not to copy VAR_DECLs, etc., so this is safe. */
+
+ /* We should never have TREE_BLOCK set on non-statements. */
+ if (EXPR_P (*tp))
+ gcc_assert (!TREE_BLOCK (*tp));
+
if (TREE_CODE (*tp) == MEM_REF)
{
- /* We need to re-canonicalize MEM_REFs from inline substitutions
- that can happen when a pointer argument is an ADDR_EXPR. */
- tree decl = TREE_OPERAND (*tp, 0);
- tree *n;
-
- /* See remap_ssa_name. */
- if (TREE_CODE (decl) == SSA_NAME
- && TREE_CODE (SSA_NAME_VAR (decl)) == RESULT_DECL
- && id->transform_return_to_modify)
- decl = SSA_NAME_VAR (decl);
+ tree ptr = TREE_OPERAND (*tp, 0);
+ tree type = remap_type (TREE_TYPE (*tp), id);
+ tree old = *tp;
- n = (tree *) pointer_map_contains (id->decl_map, decl);
- if (n)
- {
- tree old = *tp;
- tree ptr = unshare_expr (*n);
- tree tem;
- if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
- ptr,
- TREE_OPERAND (*tp, 1),
- TREE_TYPE (*tp)))
- && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
- {
- tree *tem_basep = &tem;
- while (handled_component_p (*tem_basep))
- tem_basep = &TREE_OPERAND (*tem_basep, 0);
- if (TREE_CODE (*tem_basep) == MEM_REF)
- *tem_basep
- = build2 (MEM_REF, TREE_TYPE (*tem_basep),
- TREE_OPERAND (*tem_basep, 0),
- fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
- TREE_OPERAND (*tem_basep, 1)));
- else
- *tem_basep
- = build2 (MEM_REF, TREE_TYPE (*tem_basep),
- build_fold_addr_expr (*tem_basep),
- build_int_cst
- (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
- *tp = tem;
- }
- else
- {
- *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
- ptr, TREE_OPERAND (*tp, 1));
- TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
- TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
- }
- TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
- *walk_subtrees = 0;
- return NULL;
- }
+ /* We need to re-canonicalize MEM_REFs from inline substitutions
+ that can happen when a pointer argument is an ADDR_EXPR.
+ Recurse here manually to allow that. */
+ walk_tree (&ptr, remap_gimple_op_r, data, NULL);
+ *tp = fold_build2 (MEM_REF, type,
+ ptr, TREE_OPERAND (*tp, 1));
+ TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
+ TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+ TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+ *walk_subtrees = 0;
+ return NULL;
}
/* Here is the "usual case". Copy this tree node, and then
tweak some special cases. */
copy_tree_r (tp, walk_subtrees, NULL);
+ if (TREE_CODE (*tp) != OMP_CLAUSE)
+ TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
+
/* Global variables we haven't seen yet need to go into referenced
vars. If not referenced from types only. */
- if (gimple_in_ssa_p (cfun)
- && TREE_CODE (*tp) == VAR_DECL
+ if (gimple_referenced_vars (cfun)
+ && TREE_CODE (*tp) == VAR_DECL && !is_global_var (*tp)
&& id->remapping_type_depth == 0
&& !processing_debug_stmt)
add_referenced_var (*tp);
- /* We should never have TREE_BLOCK set on non-statements. */
- if (EXPR_P (*tp))
- gcc_assert (!TREE_BLOCK (*tp));
-
- if (TREE_CODE (*tp) != OMP_CLAUSE)
- TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
-
if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
{
/* The copied TARGET_EXPR has never been expanded, even if the
/* Global variables we haven't seen yet needs to go into referenced
vars. If not referenced from types or debug stmts only. */
- if (gimple_in_ssa_p (cfun)
- && TREE_CODE (*tp) == VAR_DECL
+ if (gimple_referenced_vars (cfun)
+ && TREE_CODE (*tp) == VAR_DECL && !is_global_var (*tp)
&& id->remapping_type_depth == 0
&& !processing_debug_stmt)
add_referenced_var (*tp);
old_nr = tree_low_cst (old_t_nr, 0);
new_nr = remap_eh_region_nr (old_nr, id);
- return build_int_cst (NULL, new_nr);
+ return build_int_cst (integer_type_node, new_nr);
}
/* Helper for copy_bb. Remap statement STMT using the inlining
= gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
break;
+ case GIMPLE_TRANSACTION:
+ s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
+ copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
+ gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
+ break;
+
default:
gcc_unreachable ();
}
VEC_safe_push (gimple, heap, id->debug_stmts, copy);
return copy;
}
+ if (gimple_debug_source_bind_p (stmt))
+ {
+ copy = gimple_build_debug_source_bind
+ (gimple_debug_source_bind_get_var (stmt),
+ gimple_debug_source_bind_get_value (stmt), stmt);
+ VEC_safe_push (gimple, heap, id->debug_stmts, copy);
+ return copy;
+ }
/* Create a new deep copy of the statement. */
copy = gimple_copy (stmt);
gimple_set_block (copy, new_block);
- if (gimple_debug_bind_p (copy))
+ if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
return copy;
/* Remap all the operands in COPY. */
edge = cgraph_clone_edge (edge, id->dst_node, stmt,
gimple_uid (stmt),
REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
- edge->frequency, true);
+ true);
/* We could also just rescale the frequency, but
doing so would introduce roundoff errors and make
verifier unhappy. */
edge->frequency
- = compute_call_stmt_bb_frequency (id->dst_node->decl,
+ = compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
copy_basic_block);
if (dump_file
&& profile_status_for_function (cfun) != PROFILE_ABSENT
if ((!edge
|| (edge->indirect_inlining_edge
&& id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
+ && id->dst_node->analyzed
&& (fn = gimple_call_fndecl (stmt)) != NULL)
{
- struct cgraph_node *dest = cgraph_node (fn);
+ struct cgraph_node *dest = cgraph_get_node (fn);
/* We have missing edge in the callgraph. This can happen
when previous inlining turned an indirect call into a
producing dead clone (for further cloning). In all
other cases we hit a bug (incorrect node sharing is the
most common reason for missing edges). */
- gcc_assert (dest->needed || !dest->analyzed
- || dest->address_taken
+ gcc_assert (!dest->analyzed
+ || dest->symbol.address_taken
|| !id->src_node->analyzed
|| !id->dst_node->analyzed);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
cgraph_create_edge_including_clones
(id->dst_node, dest, orig_stmt, stmt, bb->count,
- compute_call_stmt_bb_frequency (id->dst_node->decl,
+ compute_call_stmt_bb_frequency (id->dst_node->symbol.decl,
copy_basic_block),
- bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
+ CIF_ORIGINALLY_INDIRECT_CALL);
else
cgraph_create_edge (id->dst_node, dest, stmt,
bb->count,
compute_call_stmt_bb_frequency
- (id->dst_node->decl, copy_basic_block),
- bb->loop_depth)->inline_failed
+ (id->dst_node->symbol.decl,
+ copy_basic_block))->inline_failed
= CIF_ORIGINALLY_INDIRECT_CALL;
if (dump_file)
{
ssa_op_iter i;
tree def;
- find_new_referenced_vars (gsi_stmt (copy_gsi));
+ find_referenced_vars_in (gsi_stmt (copy_gsi));
FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
if (TREE_CODE (def) == SSA_NAME)
SSA_NAME_DEF_STMT (def) = stmt;
copy_stmt = gsi_stmt (si);
if (!is_gimple_debug (copy_stmt))
- {
- update_stmt (copy_stmt);
- if (gimple_in_ssa_p (cfun))
- mark_symbols_for_renaming (copy_stmt);
- }
+ update_stmt (copy_stmt);
/* Do this before the possible split_block. */
gsi_next (&si);
edge new_edge;
bool inserted = false;
- for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
+ for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
tree res, new_res;
gimple new_phi;
cfun->static_chain_decl = src_cfun->static_chain_decl;
cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
cfun->function_end_locus = src_cfun->function_end_locus;
- cfun->curr_properties = src_cfun->curr_properties;
+ cfun->curr_properties = src_cfun->curr_properties & ~PROP_loops;
cfun->last_verified = src_cfun->last_verified;
cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
cfun->stdarg = src_cfun->stdarg;
- cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
cfun->after_inlining = src_cfun->after_inlining;
cfun->can_throw_non_call_exceptions
= src_cfun->can_throw_non_call_exceptions;
{
init_tree_ssa (cfun);
cfun->gimple_df->in_ssa_p = true;
- init_ssa_operands ();
+ init_ssa_operands (cfun);
}
pop_cfun ();
}
{
si = ssi;
gsi_prev (&ssi);
- if (!single_pred_p (e->dest))
+ if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
gimple_debug_bind_reset_value (stmt);
gsi_remove (&si, false);
gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
continue;
}
- var = gimple_debug_bind_get_var (stmt);
- if (single_pred_p (e->dest))
+ if (gimple_debug_bind_p (stmt))
+ {
+ var = gimple_debug_bind_get_var (stmt);
+ if (single_pred_p (e->dest))
+ {
+ value = gimple_debug_bind_get_value (stmt);
+ value = unshare_expr (value);
+ }
+ else
+ value = NULL_TREE;
+ new_stmt = gimple_build_debug_bind (var, value, stmt);
+ }
+ else if (gimple_debug_source_bind_p (stmt))
{
- value = gimple_debug_bind_get_value (stmt);
- value = unshare_expr (value);
+ var = gimple_debug_source_bind_get_var (stmt);
+ value = gimple_debug_source_bind_get_value (stmt);
+ new_stmt = gimple_build_debug_source_bind (var, value, stmt);
}
else
- value = NULL_TREE;
- new_stmt = gimple_build_debug_bind (var, value, stmt);
+ gcc_unreachable ();
gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
gsi_prev (&ssi);
t = id->block;
if (gimple_block (stmt))
{
- tree *n;
n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
if (n)
t = *n;
processing_debug_stmt = 1;
- t = gimple_debug_bind_get_var (stmt);
+ if (gimple_debug_source_bind_p (stmt))
+ t = gimple_debug_source_bind_get_var (stmt);
+ else
+ t = gimple_debug_bind_get_var (stmt);
if (TREE_CODE (t) == PARM_DECL && id->debug_map
&& (n = (tree *) pointer_map_contains (id->debug_map, t)))
else
walk_tree (&t, remap_gimple_op_r, &wi, NULL);
- gimple_debug_bind_set_var (stmt, t);
+ if (gimple_debug_bind_p (stmt))
+ {
+ gimple_debug_bind_set_var (stmt, t);
- if (gimple_debug_bind_has_value_p (stmt))
- walk_tree (gimple_debug_bind_get_value_ptr (stmt),
- remap_gimple_op_r, &wi, NULL);
+ if (gimple_debug_bind_has_value_p (stmt))
+ walk_tree (gimple_debug_bind_get_value_ptr (stmt),
+ remap_gimple_op_r, &wi, NULL);
- /* Punt if any decl couldn't be remapped. */
- if (processing_debug_stmt < 0)
- gimple_debug_bind_reset_value (stmt);
+ /* Punt if any decl couldn't be remapped. */
+ if (processing_debug_stmt < 0)
+ gimple_debug_bind_reset_value (stmt);
+ }
+ else if (gimple_debug_source_bind_p (stmt))
+ {
+ gimple_debug_source_bind_set_var (stmt, t);
+ walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
+ remap_gimple_op_r, &wi, NULL);
+ }
processing_debug_stmt = 0;
update_stmt (stmt);
- if (gimple_in_ssa_p (cfun))
- mark_symbols_for_renaming (stmt);
}
/* Process deferred debug stmts. In order to give values better odds
}
gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
gimple_regimplify_operands (init_stmt, &si);
- mark_symbols_for_renaming (init_stmt);
if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
{
&& value != error_mark_node
&& !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
{
+ /* If we can match up types by promotion/demotion do so. */
if (fold_convertible_p (TREE_TYPE (p), value))
- rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
+ rhs = fold_convert (TREE_TYPE (p), value);
else
- /* ??? For valid (GIMPLE) programs we should not end up here.
- Still if something has gone wrong and we end up with truly
- mismatched types here, fall back to using a VIEW_CONVERT_EXPR
- to not leak invalid GIMPLE to the following passes. */
- rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
+ {
+ /* ??? For valid programs we should not end up here.
+ Still if we end up with truly mismatched types here, fall back
+ to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
+ GIMPLE to the following passes. */
+ if (!is_gimple_reg_type (TREE_TYPE (value))
+ || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
+ rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
+ else
+ rhs = build_zero_cst (TREE_TYPE (p));
+ }
}
/* Make an equivalent VAR_DECL. Note that we must NOT remap the type
/* We're actually using the newly-created var. */
if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
- {
- get_var_ann (var);
- add_referenced_var (var);
- }
+ add_referenced_var (var);
/* Declare this new variable. */
DECL_CHAIN (var) = *vars;
/* Make gimplifier happy about this variable. */
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
+ /* We are eventually using the value - make sure all variables
+ referenced therein are properly recorded. */
+ if (value
+ && gimple_referenced_vars (cfun)
+ && TREE_CODE (value) == ADDR_EXPR)
+ {
+ tree base = get_base_address (TREE_OPERAND (value, 0));
+ if (base && TREE_CODE (base) == VAR_DECL && !is_global_var (base))
+ add_referenced_var (base);
+ }
+
/* If the parameter is never assigned to, has no SSA_NAMEs created,
we would not need to create a new variable here at all, if it
weren't for debug info. Still, we can just use the argument
STRIP_USELESS_TYPE_CONVERSION (rhs);
- /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
- keep our trees in gimple form. */
- if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
+ /* If we are in SSA form properly remap the default definition
+ or assign to a dummy SSA name if the parameter is unused and
+ we are not optimizing. */
+ if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
{
- def = remap_ssa_name (def, id);
- init_stmt = gimple_build_assign (def, rhs);
- SSA_NAME_IS_DEFAULT_DEF (def) = 0;
- set_default_def (var, NULL);
+ if (def)
+ {
+ def = remap_ssa_name (def, id);
+ init_stmt = gimple_build_assign (def, rhs);
+ SSA_NAME_IS_DEFAULT_DEF (def) = 0;
+ set_default_def (var, NULL);
+ }
+ else if (!optimize)
+ {
+ def = make_ssa_name (var, NULL);
+ init_stmt = gimple_build_assign (def, rhs);
+ }
}
else
init_stmt = gimple_build_assign (var, rhs);
basic_block entry_bb)
{
tree callee = id->src_fn;
- tree caller = id->dst_fn;
tree result = DECL_RESULT (callee);
tree callee_type = TREE_TYPE (result);
tree caller_type;
else
caller_type = TREE_TYPE (TREE_TYPE (callee));
- /* We don't need to do anything for functions that don't return
- anything. */
- if (!result || VOID_TYPE_P (callee_type))
+ /* We don't need to do anything for functions that don't return anything. */
+ if (VOID_TYPE_P (callee_type))
return NULL_TREE;
/* If there was a return slot, then the return value is the
gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
var = copy_result_decl_to_var (result, id);
- if (gimple_in_ssa_p (cfun))
- {
- get_var_ann (var);
- add_referenced_var (var);
- }
+ if (gimple_referenced_vars (cfun))
+ add_referenced_var (var);
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
- add_local_decl (DECL_STRUCT_FUNCTION (caller), var);
/* Do not have the rest of GCC warn about this variable as it should
not be visible to the user. */
promoted, convert it back to the expected type. */
use = var;
if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
- use = fold_convert (caller_type, var);
+ {
+ /* If we can match up types by promotion/demotion do so. */
+ if (fold_convertible_p (caller_type, var))
+ use = fold_convert (caller_type, var);
+ else
+ {
+ /* ??? For valid programs we should not end up here.
+ Still if we end up with truly mismatched types here, fall back
+ to using a MEM_REF to not leak invalid GIMPLE to the following
+ passes. */
+ /* Prevent var from being written into SSA form. */
+ if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
+ || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
+ DECL_GIMPLE_REG_P (var) = false;
+ else if (is_gimple_reg_type (TREE_TYPE (var)))
+ TREE_ADDRESSABLE (var) = true;
+ use = fold_build2 (MEM_REF, caller_type,
+ build_fold_addr_expr (var),
+ build_int_cst (ptr_type_node, 0));
+ }
+ }
STRIP_USELESS_TYPE_CONVERSION (use);
TREE_ADDRESSABLE (var) = 1;
var = build_fold_addr_expr (var);
}
+ else if (gimple_in_ssa_p (cfun)
+ && is_gimple_reg (var))
+ /* ??? Re-org id->retval and its special handling so that we can
+ record an SSA name directly and not need to invoke the SSA renamer. */
+ mark_sym_for_renaming (var);
done:
/* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
&& !is_gimple_val (var))
{
tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
- if (gimple_in_ssa_p (id->src_cfun))
+ if (gimple_referenced_vars (cfun))
+ add_referenced_var (temp);
+ insert_decl_map (id, result, temp);
+ /* When RESULT_DECL is in SSA form, we need to remap and initialize
+	 its default_def SSA_NAME. */
+ if (gimple_in_ssa_p (id->src_cfun)
+ && is_gimple_reg (result))
{
- get_var_ann (temp);
- add_referenced_var (temp);
+ temp = make_ssa_name (temp, NULL);
+ insert_decl_map (id, gimple_default_def (id->src_cfun, result),
+ temp);
}
- insert_decl_map (id, result, temp);
- /* When RESULT_DECL is in SSA form, we need to use it's default_def
- SSA_NAME. */
- if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
- temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
}
else
this may change program's memory overhead drastically when the
function using alloca is called in loop. In GCC present in
SPEC2000 inlining into schedule_block cause it to require 2GB of
- RAM instead of 256MB. */
+ RAM instead of 256MB. Don't do so for alloca calls emitted for
+ VLA objects as those can't cause unbounded growth (they're always
+   wrapped inside stack_save/stack_restore regions). */
if (gimple_alloca_call_p (stmt)
+ && !gimple_call_alloca_for_var_p (stmt)
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
inline_forbidden_reason
pointer_set_destroy (visited_nodes);
return forbidden_p;
}
-
-/* Return true if CALLEE cannot be inlined into CALLER. */
-
+\f
+/* Return false if the function FNDECL cannot be inlined on account of its
+ attributes, true otherwise. */
static bool
-inline_forbidden_into_p (tree caller, tree callee)
+function_attribute_inlinable_p (const_tree fndecl)
{
- /* Don't inline if the functions have different EH personalities. */
- if (DECL_FUNCTION_PERSONALITY (caller)
- && DECL_FUNCTION_PERSONALITY (callee)
- && (DECL_FUNCTION_PERSONALITY (caller)
- != DECL_FUNCTION_PERSONALITY (callee)))
- return true;
+ if (targetm.attribute_table)
+ {
+ const_tree a;
- /* Don't inline if the callee can throw non-call exceptions but the
- caller cannot. */
- if (DECL_STRUCT_FUNCTION (callee)
- && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
- && !(DECL_STRUCT_FUNCTION (caller)
- && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
- return true;
+ for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
+ {
+ const_tree name = TREE_PURPOSE (a);
+ int i;
- return false;
+ for (i = 0; targetm.attribute_table[i].name != NULL; i++)
+ if (is_attribute_p (targetm.attribute_table[i].name, name))
+ return targetm.function_attribute_inlinable_p (fndecl);
+ }
+ }
+
+ return true;
}
/* Returns nonzero if FN is a function that does not have any
As a bonus we can now give more details about the reason why a
function is not inlinable. */
if (always_inline)
- sorry (inline_forbidden_reason, fn);
+ error (inline_forbidden_reason, fn);
else if (do_warning)
warning (OPT_Winline, inline_forbidden_reason, fn);
??? We may consider mapping RTL costs to this. */
case COND_EXPR:
case VEC_COND_EXPR:
+ case VEC_PERM_EXPR:
case PLUS_EXPR:
case POINTER_PLUS_EXPR:
case DOT_PROD_EXPR:
case WIDEN_MULT_PLUS_EXPR:
case WIDEN_MULT_MINUS_EXPR:
+ case WIDEN_LSHIFT_EXPR:
case VEC_WIDEN_MULT_HI_EXPR:
case VEC_WIDEN_MULT_LO_EXPR:
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
- case VEC_EXTRACT_EVEN_EXPR:
- case VEC_EXTRACT_ODD_EXPR:
- case VEC_INTERLEAVE_HIGH_EXPR:
- case VEC_INTERLEAVE_LOW_EXPR:
+ case VEC_WIDEN_LSHIFT_HI_EXPR:
+ case VEC_WIDEN_LSHIFT_LO_EXPR:
return 1;
likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
of moving something into "a", which we compute using the function
estimate_move_cost. */
+ if (gimple_clobber_p (stmt))
+ return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
+
lhs = gimple_assign_lhs (stmt);
rhs = gimple_assign_rhs1 (stmt);
case GIMPLE_CALL:
{
tree decl = gimple_call_fndecl (stmt);
- tree addr = gimple_call_fn (stmt);
- tree funtype = TREE_TYPE (addr);
- bool stdarg = false;
-
- if (POINTER_TYPE_P (funtype))
- funtype = TREE_TYPE (funtype);
+ struct cgraph_node *node = NULL;
/* Do not special case builtins where we see the body.
This just confuse inliner. */
- if (!decl || cgraph_node (decl)->analyzed)
+ if (!decl || !(node = cgraph_get_node (decl)) || node->analyzed)
;
/* For buitins that are likely expanded to nothing or
inlined do not account operand costs. */
}
}
- cost = weights->call_cost;
- if (decl)
- funtype = TREE_TYPE (decl);
-
- if (!VOID_TYPE_P (TREE_TYPE (funtype)))
- cost += estimate_move_cost (TREE_TYPE (funtype));
-
- if (funtype)
- stdarg = stdarg_p (funtype);
-
- /* Our cost must be kept in sync with
- cgraph_estimate_size_after_inlining that does use function
- declaration to figure out the arguments.
-
- For functions taking variable list of arguments we must
- look into call statement intself. This is safe because
- we will get only higher costs and in most cases we will
- not inline these anyway. */
- if (decl && DECL_ARGUMENTS (decl) && !stdarg)
+ cost = node ? weights->call_cost : weights->indirect_call_cost;
+ if (gimple_call_lhs (stmt))
+ cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
+ for (i = 0; i < gimple_call_num_args (stmt); i++)
{
- tree arg;
- for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
- if (!VOID_TYPE_P (TREE_TYPE (arg)))
- cost += estimate_move_cost (TREE_TYPE (arg));
+ tree arg = gimple_call_arg (stmt, i);
+ cost += estimate_move_cost (TREE_TYPE (arg));
}
- else if (funtype && prototype_p (funtype) && !stdarg)
- {
- tree t;
- for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
- t = TREE_CHAIN (t))
- if (!VOID_TYPE_P (TREE_VALUE (t)))
- cost += estimate_move_cost (TREE_VALUE (t));
- }
- else
- {
- for (i = 0; i < gimple_call_num_args (stmt); i++)
- {
- tree arg = gimple_call_arg (stmt, i);
- if (!VOID_TYPE_P (TREE_TYPE (arg)))
- cost += estimate_move_cost (TREE_TYPE (arg));
- }
- }
-
break;
}
return (weights->omp_cost
+ estimate_num_insns_seq (gimple_omp_body (stmt), weights));
+ case GIMPLE_TRANSACTION:
+ return (weights->tm_cost
+ + estimate_num_insns_seq (gimple_transaction_body (stmt),
+ weights));
+
default:
gcc_unreachable ();
}
init_inline_once (void)
{
eni_size_weights.call_cost = 1;
+ eni_size_weights.indirect_call_cost = 3;
eni_size_weights.target_builtin_call_cost = 1;
eni_size_weights.div_mod_cost = 1;
eni_size_weights.omp_cost = 40;
+ eni_size_weights.tm_cost = 10;
eni_size_weights.time_based = false;
eni_size_weights.return_cost = 1;
underestimating the cost does less harm than overestimating it, so
we choose a rather small value here. */
eni_time_weights.call_cost = 10;
+ eni_time_weights.indirect_call_cost = 15;
eni_time_weights.target_builtin_call_cost = 1;
eni_time_weights.div_mod_cost = 10;
eni_time_weights.omp_cost = 40;
+ eni_time_weights.tm_cost = 40;
eni_time_weights.time_based = true;
eni_time_weights.return_cost = 2;
}
if (gimple_code (stmt) != GIMPLE_CALL)
goto egress;
+ cg_edge = cgraph_edge (id->dst_node, stmt);
+ gcc_checking_assert (cg_edge);
/* First, see if we can figure out what function is being called.
If we cannot, then there is no hope of inlining the function. */
- fn = gimple_call_fndecl (stmt);
- if (!fn)
+ if (cg_edge->indirect_unknown_callee)
goto egress;
-
- /* Turn forward declarations into real ones. */
- fn = cgraph_node (fn)->decl;
+ fn = cg_edge->callee->symbol.decl;
+ gcc_checking_assert (fn);
/* If FN is a declaration of a function in a nested scope that was
globally declared inline, we don't set its DECL_INITIAL.
&& gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
fn = DECL_ABSTRACT_ORIGIN (fn);
- /* Objective C and fortran still calls tree_rest_of_compilation directly.
- Kill this check once this is fixed. */
- if (!id->dst_node->analyzed)
- goto egress;
-
- cg_edge = cgraph_edge (id->dst_node, stmt);
-
- /* First check that inlining isn't simply forbidden in this case. */
- if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
- goto egress;
-
/* Don't try to inline functions that are not well-suited to inlining. */
- if (!cgraph_inline_p (cg_edge, &reason))
+ if (cg_edge->inline_failed)
{
+ reason = cg_edge->inline_failed;
/* If this call was originally indirect, we do not want to emit any
inlining related warnings or sorry messages because there are no
guarantees regarding those. */
if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
/* Avoid warnings during early inline pass. */
- && cgraph_global_info_ready)
+ && cgraph_global_info_ready
+ /* PR 20090218-1_0.c. Body can be provided by another module. */
+ && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
{
- sorry ("inlining failed in call to %q+F: %s", fn,
- _(cgraph_inline_failed_string (reason)));
- sorry ("called from here");
+ error ("inlining failed in call to always_inline %q+F: %s", fn,
+ cgraph_inline_failed_string (reason));
+ error ("called from here");
}
- else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
+ else if (warn_inline
+ && DECL_DECLARED_INLINE_P (fn)
+ && !DECL_NO_INLINE_WARNING_P (fn)
&& !DECL_IN_SYSTEM_HEADER (fn)
&& reason != CIF_UNSPECIFIED
&& !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
+ /* Do not warn about not inlined recursive calls. */
+ && !cgraph_edge_recursive_p (cg_edge)
/* Avoid warnings during early inline pass. */
&& cgraph_global_info_ready)
{
}
goto egress;
}
- fn = cg_edge->callee->decl;
+ fn = cg_edge->callee->symbol.decl;
#ifdef ENABLE_CHECKING
- if (cg_edge->callee->decl != id->dst_node->decl)
+ if (cg_edge->callee->symbol.decl != id->dst_node->symbol.decl)
verify_cgraph_node (cg_edge->callee);
#endif
id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
/* Update the callers EH personality. */
- if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
- DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
+ if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl))
+ DECL_FUNCTION_PERSONALITY (cg_edge->caller->symbol.decl)
+ = DECL_FUNCTION_PERSONALITY (cg_edge->callee->symbol.decl);
/* Split the block holding the GIMPLE_CALL. */
e = split_block (bb, stmt);
gimple old_stmt = stmt;
stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
gsi_replace (&stmt_gsi, stmt, false);
- if (gimple_in_ssa_p (cfun))
- mark_symbols_for_renaming (stmt);
maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
}
else
inlined. If we don't do this now, we can lose the information about the
variables in the function when the blocks get blown away as soon as we
remove the cgraph node. */
- (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
+ (*debug_hooks->outlining_inline_function) (cg_edge->callee->symbol.decl);
/* Update callgraph if needed. */
cgraph_remove_node (cg_edge->callee);
/* Expand call statements reachable from STMT_P.
We can only have CALL_EXPRs as the "toplevel" tree code or nested
- in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). We can
- unfortunately not use that function here because we need a pointer
- to the CALL_EXPR, not the tree itself. */
+ in a MODIFY_EXPR. */
static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
if (fold_stmt (&gsi))
{
gimple new_stmt;
+ /* If a builtin at the end of a bb folded into nothing,
+ the following loop won't work. */
+ if (gsi_end_p (gsi))
+ {
+ cgraph_update_edges_for_call_stmt (old_stmt,
+ old_decl, NULL);
+ break;
+ }
if (gsi_end_p (i2))
i2 = gsi_start_bb (BASIC_BLOCK (first));
else
struct gimplify_ctx gctx;
bool inlined_p = false;
- /* There is no point in performing inlining if errors have already
- occurred -- and we might crash if we try to inline invalid
- code. */
- if (seen_error ())
- return 0;
-
/* Clear out ID. */
memset (&id, 0, sizeof (id));
- id.src_node = id.dst_node = cgraph_node (fn);
+ id.src_node = id.dst_node = cgraph_get_node (fn);
+ gcc_assert (id.dst_node->analyzed);
id.dst_fn = fn;
/* Or any functions that aren't finished yet. */
if (current_function_decl)
here. */
tree chain = NULL_TREE, new_tree;
- chain = TREE_CHAIN (*tp);
+ if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
+ chain = TREE_CHAIN (*tp);
/* Copy the node. */
new_tree = copy_node (*tp);
CONSTRUCTOR_ELTS (*tp));
*tp = new_tree;
}
+ else if (code == STATEMENT_LIST)
+ /* We used to just abort on STATEMENT_LIST, but we can run into them
+ with statement-expressions (c++/40975). */
+ copy_statement_list (tp);
else if (TREE_CODE_CLASS (code) == tcc_type)
*walk_subtrees = 0;
else if (TREE_CODE_CLASS (code) == tcc_declaration)
*walk_subtrees = 0;
else if (TREE_CODE_CLASS (code) == tcc_constant)
*walk_subtrees = 0;
- else
- gcc_assert (code != STATEMENT_LIST);
return NULL_TREE;
}
new function. */
DECL_CONTEXT (copy) = id->dst_fn;
+ if (TREE_CODE (decl) == VAR_DECL
+ /* C++ clones functions during parsing, before
+ referenced_vars. */
+ && gimple_referenced_vars (DECL_STRUCT_FUNCTION (id->src_fn))
+ && referenced_var_lookup (DECL_STRUCT_FUNCTION (id->src_fn),
+ DECL_UID (decl)))
+ add_referenced_var (copy);
+
return copy;
}
if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
{
tree new_tree = remap_decl (arg, id);
+ if (TREE_CODE (new_tree) != PARM_DECL)
+ new_tree = id->copy_decl (arg, id);
lang_hooks.dup_lang_specific_decl (new_tree);
*parg = new_tree;
parg = &DECL_CHAIN (new_tree);
as temporary variable later in function, the uses will be
replaced by local variable. */
tree var = copy_decl_to_var (arg, id);
- get_var_ann (var);
add_referenced_var (var);
insert_decl_map (id, arg, var);
/* Declare this new variable. */
if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
- cgraph_remove_node_and_inline_clones (e->callee);
+ cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
else
cgraph_remove_edge (e);
}
if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
- cgraph_remove_node_and_inline_clones (e->callee);
+ cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
else
cgraph_remove_edge (e);
}
}
}
- if (changed)
- tidy_fallthru_edges ();
return changed;
}
If non-NULL ARGS_TO_SKIP determine function parameters to remove
from new version.
+ If SKIP_RETURN is true, the new version will return void.
If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
If non_NULL NEW_ENTRY determine new entry BB of the clone.
*/
tree_function_versioning (tree old_decl, tree new_decl,
VEC(ipa_replace_map_p,gc)* tree_map,
bool update_clones, bitmap args_to_skip,
- bitmap blocks_to_copy, basic_block new_entry)
+ bool skip_return, bitmap blocks_to_copy,
+ basic_block new_entry)
{
struct cgraph_node *old_version_node;
struct cgraph_node *new_version_node;
&& TREE_CODE (new_decl) == FUNCTION_DECL);
DECL_POSSIBLY_INLINED (old_decl) = 1;
- old_version_node = cgraph_node (old_decl);
- new_version_node = cgraph_node (new_decl);
+ old_version_node = cgraph_get_node (old_decl);
+ gcc_checking_assert (old_version_node);
+ new_version_node = cgraph_get_node (new_decl);
+ gcc_checking_assert (new_version_node);
+
+ /* Copy over debug args. */
+ if (DECL_HAS_DEBUG_ARGS_P (old_decl))
+ {
+ VEC(tree, gc) **new_debug_args, **old_debug_args;
+ gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
+ DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
+ old_debug_args = decl_debug_args_lookup (old_decl);
+ if (old_debug_args)
+ {
+ new_debug_args = decl_debug_args_insert (new_decl);
+ *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
+ }
+ }
/* Output the inlining info for this abstract function, since it has been
inlined. If we don't do this now, we can lose the information about the
if (TREE_CODE (op) == ADDR_EXPR)
{
- op = TREE_OPERAND (op, 0);
- while (handled_component_p (op))
- op = TREE_OPERAND (op, 0);
- if (TREE_CODE (op) == VAR_DECL)
+ op = get_base_address (TREE_OPERAND (op, 0));
+ if (op && TREE_CODE (op) == VAR_DECL && !is_global_var (op))
add_referenced_var (op);
}
gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
args_to_skip, &vars);
DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
+ BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
declare_inline_vars (DECL_INITIAL (new_decl), vars);
/* Add local vars. */
add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
- /* Copy the Function's body. */
- copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
- ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
-
- if (DECL_RESULT (old_decl) != NULL_TREE)
+ if (DECL_RESULT (old_decl) == NULL_TREE)
+ ;
+ else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
+ {
+ DECL_RESULT (new_decl)
+ = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
+ RESULT_DECL, NULL_TREE, void_type_node);
+ DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
+ cfun->returns_struct = 0;
+ cfun->returns_pcc_struct = 0;
+ }
+ else
{
- tree *res_decl = &DECL_RESULT (old_decl);
- DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
+ tree old_name;
+ DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
+ if (gimple_in_ssa_p (id.src_cfun)
+ && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
+ && (old_name
+ = gimple_default_def (id.src_cfun, DECL_RESULT (old_decl))))
+ {
+ tree new_name = make_ssa_name (DECL_RESULT (new_decl), NULL);
+ insert_decl_map (&id, old_name, new_name);
+ SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
+ set_default_def (DECL_RESULT (new_decl), new_name);
+ }
}
+ /* Copy the Function's body. */
+ copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
+ ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, blocks_to_copy, new_entry);
+
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (new_decl);
id.transform_call_graph_edges = CB_CGE_DUPLICATE;
id.transform_new_cfg = false;
id.transform_return_to_modify = true;
- id.transform_lang_insert_block = false;
+ id.transform_lang_insert_block = NULL;
/* Make sure not to unshare trees behind the front-end's back
since front-end specific mechanisms may rely on sharing. */
return type;
}
-
-/* Return whether it is safe to inline a function because it used different
- target specific options or call site actual types mismatch parameter types.
- E is the call edge to be checked. */
-bool
-tree_can_inline_p (struct cgraph_edge *e)
-{
-#if 0
- /* This causes a regression in SPEC in that it prevents a cold function from
- inlining a hot function. Perhaps this should only apply to functions
- that the user declares hot/cold/optimize explicitly. */
-
- /* Don't inline a function with a higher optimization level than the
- caller, or with different space constraints (hot/cold functions). */
- tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
- tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
-
- if (caller_tree != callee_tree)
- {
- struct cl_optimization *caller_opt
- = TREE_OPTIMIZATION ((caller_tree)
- ? caller_tree
- : optimization_default_node);
-
- struct cl_optimization *callee_opt
- = TREE_OPTIMIZATION ((callee_tree)
- ? callee_tree
- : optimization_default_node);
-
- if ((caller_opt->optimize > callee_opt->optimize)
- || (caller_opt->optimize_size != callee_opt->optimize_size))
- return false;
- }
-#endif
- tree caller, callee, lhs;
-
- caller = e->caller->decl;
- callee = e->callee->decl;
-
- /* First check that inlining isn't simply forbidden in this case. */
- if (inline_forbidden_into_p (caller, callee))
- {
- e->inline_failed = CIF_UNSPECIFIED;
- gimple_call_set_cannot_inline (e->call_stmt, true);
- return false;
- }
-
- /* Allow the backend to decide if inlining is ok. */
- if (!targetm.target_option.can_inline_p (caller, callee))
- {
- e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
- gimple_call_set_cannot_inline (e->call_stmt, true);
- e->call_stmt_cannot_inline_p = true;
- return false;
- }
-
- /* Do not inline calls where we cannot triviall work around mismatches
- in argument or return types. */
- if (e->call_stmt
- && ((DECL_RESULT (callee)
- && !DECL_BY_REFERENCE (DECL_RESULT (callee))
- && (lhs = gimple_call_lhs (e->call_stmt)) != NULL_TREE
- && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
- TREE_TYPE (lhs))
- && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
- || !gimple_check_call_args (e->call_stmt)))
- {
- e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
- gimple_call_set_cannot_inline (e->call_stmt, true);
- e->call_stmt_cannot_inline_p = true;
- return false;
- }
-
- return true;
-}