+2011-10-07 Tom de Vries <tom@codesourcery.com>
+
+ PR middle-end/50527
+ * tree.c (build_common_builtin_nodes): Add local_define_builtin for
+ BUILT_IN_ALLOCA_WITH_ALIGN.
+ * builtins.c (expand_builtin_alloca): Handle BUILT_IN_ALLOCA_WITH_ALIGN.
+ (expand_builtin, is_inexpensive_builtin): Same.
+ * tree-ssa-ccp.c (evaluate_stmt): Set align for
+ BUILT_IN_ALLOCA_WITH_ALIGN.
+ (fold_builtin_alloca_for_var): Rename to fold_builtin_alloca_with_align;
+ take the alignment from the second argument.
+ (ccp_fold_stmt, optimize_stack_restore): Handle BUILT_IN_ALLOCA_WITH_ALIGN.
+ * builtins.def (BUILT_IN_ALLOCA_WITH_ALIGN): Declare using
+ DEF_BUILTIN_STUB.
+ * ipa-pure-const.c (special_builtin_state): Handle
+ BUILT_IN_ALLOCA_WITH_ALIGN.
+ * tree-ssa-alias.c (ref_maybe_used_by_call_p_1)
+ (call_may_clobber_ref_p_1): Same.
+ * function.c (gimplify_parameters): Lower vla to
+ BUILT_IN_ALLOCA_WITH_ALIGN.
+ * gimplify.c (gimplify_vla_decl): Same.
+ * cfgexpand.c (expand_call_stmt): Handle BUILT_IN_ALLOCA_WITH_ALIGN.
+ * tree-mudflap.c (mf_xform_statements): Same.
+ * tree-ssa-dce.c (mark_stmt_if_obviously_necessary)
+ (mark_all_reaching_defs_necessary_1, propagate_necessity): Same.
+ * varasm.c (incorporeal_function_p): Same.
+ * tree-object-size.c (alloc_object_size): Same.
+ * gimple.c (gimple_build_call_from_tree): Same.
+
2011-10-07 Bernd Schmidt <bernds@codesourcery.com>
* function.c (frame_required_for_rtx): Remove function.
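A minimal sketch of the lowering this patch performs, assuming a typical target
where the VLA's DECL_ALIGN is 32 bits; the GIMPLE in the comment is
illustrative only, not taken from a compiler dump:

    extern void use (int *, int);

    void
    f (int n)
    {
      int vla[n];   /* variable-sized object */
      use (vla, n);
      /* With this patch, gimplify_vla_decl emits roughly:
           D.2765 = __builtin_alloca_with_align (D.2764, 32);
         i.e. the VLA's alignment (in bits) is carried as a second argument
         instead of being lost to a plain __builtin_alloca call.  */
    }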
{
rtx op0;
rtx result;
+ bool valid_arglist;
+ unsigned int align;
+ bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
+ == BUILT_IN_ALLOCA_WITH_ALIGN);
/* Emit normal call if marked not-inlineable. */
if (CALL_CANNOT_INLINE_P (exp))
return NULL_RTX;
- if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
+ valid_arglist
+ = (alloca_with_align
+ ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
+ : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
+
+ if (!valid_arglist)
return NULL_RTX;
/* Compute the argument. */
op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
+ /* Compute the alignment. */
+ align = (alloca_with_align
+ ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
+ : BIGGEST_ALIGNMENT);
+
/* Allocate the desired space. */
- result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
- cannot_accumulate);
+ result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
result = convert_memory_address (ptr_mode, result);
return result;
&& !called_as_built_in (fndecl)
&& DECL_ASSEMBLER_NAME_SET_P (fndecl)
&& fcode != BUILT_IN_ALLOCA
+ && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
&& fcode != BUILT_IN_FREE)
return expand_call (exp, target, ignore);
return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
/* If the allocation stems from the declaration of a variable-sized
object, it cannot accumulate. */
target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
{
case BUILT_IN_ABS:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
case BUILT_IN_BSWAP32:
case BUILT_IN_BSWAP64:
case BUILT_IN_CLZ:
/* Implementing variable sized local variables. */
DEF_BUILTIN_STUB (BUILT_IN_STACK_SAVE, "__builtin_stack_save")
DEF_BUILTIN_STUB (BUILT_IN_STACK_RESTORE, "__builtin_stack_restore")
+DEF_BUILTIN_STUB (BUILT_IN_ALLOCA_WITH_ALIGN, "__builtin_alloca_with_align")
/* Object size checking builtins. */
DEF_GCC_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size", BT_FN_SIZE_CONST_PTR_INT, ATTR_PURE_NOTHROW_LEAF_LIST)
CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
if (decl
&& DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA)
+ && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
else
CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
DECL_IGNORED_P (addr) = 0;
local = build_fold_indirect_ref (addr);
- t = built_in_decls[BUILT_IN_ALLOCA];
- t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
+ t = built_in_decls[BUILT_IN_ALLOCA_WITH_ALIGN];
+ t = build_call_expr (t, 2, DECL_SIZE_UNIT (parm),
+ size_int (DECL_ALIGN (parm)));
+
/* The call has been built for a variable-sized object. */
CALL_ALLOCA_FOR_VAR_P (t) = 1;
t = fold_convert (ptr_type, t);
gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
+ && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
else
gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
SET_DECL_VALUE_EXPR (decl, t);
DECL_HAS_VALUE_EXPR_P (decl) = 1;
- t = built_in_decls[BUILT_IN_ALLOCA];
- t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
+ t = built_in_decls[BUILT_IN_ALLOCA_WITH_ALIGN];
+ t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
+ size_int (DECL_ALIGN (decl)));
/* The call has been built for a variable-sized object. */
CALL_ALLOCA_FOR_VAR_P (t) = 1;
t = fold_convert (ptr_type, t);
case BUILT_IN_RETURN:
case BUILT_IN_UNREACHABLE:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
case BUILT_IN_STACK_SAVE:
case BUILT_IN_STACK_RESTORE:
case BUILT_IN_EH_POINTER:
case GIMPLE_CALL:
{
tree fndecl = gimple_call_fndecl (s);
- if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
+ if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
+ || (DECL_FUNCTION_CODE (fndecl)
+ == BUILT_IN_ALLOCA_WITH_ALIGN)))
gimple_call_set_cannot_inline (s, true);
}
break;
/* fall through */
case BUILT_IN_MALLOC:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
arg1 = 0;
default:
break;
case BUILT_IN_MALLOC:
case BUILT_IN_CALLOC:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
case BUILT_IN_STACK_SAVE:
case BUILT_IN_STACK_RESTORE:
case BUILT_IN_MEMSET:
return false;
case BUILT_IN_STACK_SAVE:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
case BUILT_IN_ASSUME_ALIGNED:
return false;
/* Freeing memory kills the pointed-to memory. More importantly
tree simplified = NULL_TREE;
ccp_lattice_t likelyvalue = likely_value (stmt);
bool is_constant = false;
+ unsigned int align;
if (dump_file && (dump_flags & TDF_DETAILS))
{
break;
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
+ align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
+ ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
+ : BIGGEST_ALIGNMENT);
val.lattice_val = CONSTANT;
val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
val.mask = shwi_to_double_int
- (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
+ (~(((HOST_WIDE_INT) align)
/ BITS_PER_UNIT - 1));
break;
return val;
}
-/* Detects a vla-related alloca with a constant argument. Declares fixed-size
- array and return the address, if found, otherwise returns NULL_TREE. */
+/* Detects a __builtin_alloca_with_align with a constant size argument.  If
+   found, declares a fixed-size array and returns its address, otherwise
+   returns NULL_TREE.  */
static tree
-fold_builtin_alloca_for_var (gimple stmt)
+fold_builtin_alloca_with_align (gimple stmt)
{
unsigned HOST_WIDE_INT size, threshold, n_elem;
tree lhs, arg, block, var, elem_type, array_type;
- unsigned int align;
/* Get lhs. */
lhs = gimple_call_lhs (stmt);
size = TREE_INT_CST_LOW (arg);
- /* Heuristic: don't fold large vlas. */
+ /* Heuristic: don't fold large allocas. */
threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
- /* In case a vla is declared at function scope, it has the same lifetime as a
- declared array, so we allow a larger size. */
+ /* In case the alloca is located at function entry, it has the same lifetime
+ as a declared array, so we allow a larger size. */
block = gimple_block (stmt);
if (!(cfun->after_inlining
&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
/* Declare array. */
elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
n_elem = size * 8 / BITS_PER_UNIT;
- align = MIN (size * 8, BIGGEST_ALIGNMENT);
- if (align < BITS_PER_UNIT)
- align = BITS_PER_UNIT;
array_type = build_array_type_nelts (elem_type, n_elem);
var = create_tmp_var (array_type, NULL);
- DECL_ALIGN (var) = align;
+ DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
{
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
if (pi != NULL && !pi->pt.anything)
if (gimple_call_internal_p (stmt))
return false;
- /* The heuristic of fold_builtin_alloca_for_var differs before and after
- inlining, so we don't require the arg to be changed into a constant
- for folding, but just to be constant. */
- if (gimple_call_alloca_for_var_p (stmt))
+ /* The heuristic of fold_builtin_alloca_with_align differs before and
+ after inlining, so we don't require the arg to be changed into a
+ constant for folding, but just to be constant. */
+ if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
{
- tree new_rhs = fold_builtin_alloca_for_var (stmt);
+ tree new_rhs = fold_builtin_alloca_with_align (stmt);
if (new_rhs)
{
bool res = update_call_from_tree (gsi, new_rhs);
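For illustration only (an assumed example, not from the patch): the point of
routing VLAs through BUILT_IN_ALLOCA_WITH_ALIGN is that once the size becomes
constant, e.g. after inlining, ccp_fold_stmt can replace the call with a
fixed-size temporary whose DECL_ALIGN comes from the second argument:

    extern void use (char *, int);

    static inline void
    g (int n)
    {
      char buf[n];      /* lowered to __builtin_alloca_with_align (n, align) */
      use (buf, n);
    }

    void
    caller (void)
    {
      g (64);           /* after inlining, the size argument is the constant
                           64, so fold_builtin_alloca_with_align can turn the
                           call into the address of a 64-byte array temporary,
                           subject to the PARAM_LARGE_STACK_FRAME heuristic  */
    }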
if (!callee
|| DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
/* All regular builtins are ok, just obviously not alloca. */
- || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
return NULL_TREE;
if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
case BUILT_IN_MALLOC:
case BUILT_IN_CALLOC:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
return;
default:;
case BUILT_IN_MALLOC:
case BUILT_IN_CALLOC:
case BUILT_IN_ALLOCA:
+ case BUILT_IN_ALLOCA_WITH_ALIGN:
case BUILT_IN_FREE:
return false;
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
+ || (DECL_FUNCTION_CODE (callee)
+ == BUILT_IN_ALLOCA_WITH_ALIGN)
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE
|| DECL_FUNCTION_CODE (callee) == BUILT_IN_ASSUME_ALIGNED))
"alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
}
+ ftype = build_function_type_list (ptr_type_node, size_type_node,
+ size_type_node, NULL_TREE);
+ local_define_builtin ("__builtin_alloca_with_align", ftype,
+ BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
+ ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
+
/* If we're checking the stack, `alloca' can throw. */
if (flag_stack_check)
- TREE_NOTHROW (built_in_decls[BUILT_IN_ALLOCA]) = 0;
+ {
+ TREE_NOTHROW (built_in_decls[BUILT_IN_ALLOCA]) = 0;
+ TREE_NOTHROW (built_in_decls[BUILT_IN_ALLOCA_WITH_ALIGN]) = 0;
+ }
ftype = build_function_type_list (void_type_node,
ptr_type_node, ptr_type_node,
const char *name;
if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
- && DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA)
+ && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
return true;
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));