/* Expand builtin functions.
Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
- 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
This file is part of GCC.
return align;
}
-/* Return the alignment in bits of EXP, a pointer valued expression.
- The alignment returned is, by default, the alignment of the thing that
- EXP points to. If it is not a POINTER_TYPE, 0 is returned.
+/* For a pointer valued expression EXP compute values M and N such that
+ M divides (EXP - N) and such that N < M. Store N in *BITPOSP and return M.
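+ For example (illustrative): a pointer known to lie 4 bytes past a
+ 16-byte boundary yields M == 128 and N == 32, both expressed in bits.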
- Otherwise, look at the expression to see if we can do better, i.e., if the
- expression is actually pointing at an object whose alignment is tighter. */
+ If EXP is not a pointer, 0 is returned. */
unsigned int
-get_pointer_alignment (tree exp)
+get_pointer_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
{
STRIP_NOPS (exp);
if (TREE_CODE (exp) == ADDR_EXPR)
- return get_object_alignment (TREE_OPERAND (exp, 0));
+ return get_object_alignment_1 (TREE_OPERAND (exp, 0), bitposp);
else if (TREE_CODE (exp) == SSA_NAME
&& POINTER_TYPE_P (TREE_TYPE (exp)))
{
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
- unsigned align;
if (!pi)
- return BITS_PER_UNIT;
- if (pi->misalign != 0)
- align = (pi->misalign & -pi->misalign);
- else
- align = pi->align;
- return align * BITS_PER_UNIT;
+ {
+ *bitposp = 0;
+ return BITS_PER_UNIT;
+ }
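+ /* SSA_NAME_PTR_INFO records alignment in bytes: the pointer is known
+ to equal pi->misalign modulo pi->align. Scale both values to bits. */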
+ *bitposp = pi->misalign * BITS_PER_UNIT;
+ return pi->align * BITS_PER_UNIT;
}
+ *bitposp = 0;
return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
}
+/* Return the alignment in bits of EXP, a pointer valued expression.
+ The alignment returned is, by default, the alignment of the thing that
+ EXP points to. If it is not a POINTER_TYPE, 0 is returned.
+
+ Otherwise, look at the expression to see if we can do better, i.e., if the
+ expression is actually pointing at an object whose alignment is tighter. */
+
+unsigned int
+get_pointer_alignment (tree exp)
+{
+ unsigned HOST_WIDE_INT bitpos = 0;
+ unsigned int align;
+
+ align = get_pointer_alignment_1 (exp, &bitpos);
+
+ /* align and bitpos now specify known low bits of the pointer.
+ ptr & (align - 1) == bitpos. */
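+ /* For instance (illustrative): align == 128 with bitpos == 32 means
+ the pointer is 4 bytes past a 16-byte boundary, so only the lowest
+ set bit of bitpos, 32 bits here, is a safe alignment guarantee. */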
+
+ if (bitpos != 0)
+ align = (bitpos & -bitpos);
+
+ return align;
+}
+
/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
way, because it could contain a zero byte in the middle.
TREE_STRING_LENGTH is the size of the character array, not the string.
}
static rtx
-expand_builtin_init_trampoline (tree exp)
+expand_builtin_init_trampoline (tree exp, bool onstack)
{
tree t_tramp, t_func, t_chain;
rtx m_tramp, r_tramp, r_chain, tmp;
m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
MEM_NOTRAP_P (m_tramp) = 1;
- /* The TRAMP argument should be the address of a field within the
- local function's FRAME decl. Let's see if we can fill in the
- to fill in the MEM_ATTRs for this memory. */
+ /* If ONSTACK, the TRAMP argument should be the address of a field
+ within the local function's FRAME decl. Either way, let's see if
+ we can fill in the MEM_ATTRs for this memory. */
if (TREE_CODE (t_tramp) == ADDR_EXPR)
set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
true, 0);
+ /* Creator of a heap trampoline is responsible for making sure the
+ address is aligned to at least STACK_BOUNDARY. Normally malloc
+ will ensure this anyhow. */
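+ /* (Illustrative note: round_trampoline_addr below rounds the address
+ up to TRAMPOLINE_ALIGNMENT, so a sufficiently aligned heap block
+ makes that rounding a no-op.) */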
tmp = round_trampoline_addr (r_tramp);
if (tmp != r_tramp)
{
/* Generate insns to initialize the trampoline. */
targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
- trampolines_created = 1;
+ if (onstack)
+ {
+ trampolines_created = 1;
- warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
- "trampoline generated for nested function %qD", t_func);
+ warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
+ "trampoline generated for nested function %qD", t_func);
+ }
return const0_rtx;
}
/* If the object has smaller alignment, the lock free routines cannot
 be used. */
if (type_align < mode_align)
- return integer_zero_node;
+ return boolean_false_node;
/* Check if a compare_and_swap pattern exists for the mode which represents
the required size. The pattern is not allowed to fail, so the existence
of the pattern indicates support is present. */
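+ /* For example (illustrative): a naturally aligned 4-byte object maps
+ to a 4-byte integer mode, so support reduces to whether the target
+ can compare-and-swap in that mode. */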
if (can_compare_and_swap_p (mode, true))
- return integer_one_node;
+ return boolean_true_node;
else
- return integer_zero_node;
+ return boolean_false_node;
}
/* Return true if the parameters to call EXP represent an object which will
}
size = fold_builtin_atomic_always_lock_free (arg0, arg1);
- if (size == integer_one_node)
+ if (size == boolean_true_node)
return const1_rtx;
return const0_rtx;
}
return NULL_TREE;
/* If it isn't always lock free, don't generate a result. */
- if (fold_builtin_atomic_always_lock_free (arg0, arg1) == integer_one_node)
- return integer_one_node;
+ if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
+ return boolean_true_node;
return NULL_TREE;
}
/* If the value is known at compile time, return the RTX for it. */
size = fold_builtin_atomic_is_lock_free (arg0, arg1);
- if (size == integer_one_node)
+ if (size == boolean_true_node)
return const1_rtx;
return NULL_RTX;
set of builtins. */
if (!optimize
&& !called_as_built_in (fndecl)
- && DECL_ASSEMBLER_NAME_SET_P (fndecl)
&& fcode != BUILT_IN_ALLOCA
&& fcode != BUILT_IN_ALLOCA_WITH_ALIGN
&& fcode != BUILT_IN_FREE)
return const0_rtx;
case BUILT_IN_INIT_TRAMPOLINE:
- return expand_builtin_init_trampoline (exp);
+ return expand_builtin_init_trampoline (exp, true);
+ case BUILT_IN_INIT_HEAP_TRAMPOLINE:
+ return expand_builtin_init_trampoline (exp, false);
case BUILT_IN_ADJUST_TRAMPOLINE:
return expand_builtin_adjust_trampoline (exp);
case BUILT_IN_STRCPY_CHK:
case BUILT_IN_STPCPY_CHK:
case BUILT_IN_STRNCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
case BUILT_IN_STRCAT_CHK:
case BUILT_IN_STRNCAT_CHK:
case BUILT_IN_SNPRINTF_CHK:
DECL_FUNCTION_CODE (fndecl));
case BUILT_IN_STRNCPY_CHK:
- return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
+ case BUILT_IN_STPNCPY_CHK:
+ return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
+ ignore, fcode);
case BUILT_IN_STRNCAT_CHK:
return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
break;
case BUILT_IN_STRNCAT_CHK:
case BUILT_IN_STRNCPY_CHK:
+ case BUILT_IN_STPNCPY_CHK:
len = CALL_EXPR_ARG (exp, 2);
size = CALL_EXPR_ARG (exp, 3);
break;
return build_call_expr_loc (loc, fn, 2, dest, src);
}
-/* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
+/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
are the arguments to the call. If MAXLEN is not NULL, it is maximum
- length passed as third argument. */
+ length passed as third argument. IGNORE is true if return value can be
+ ignored. FCODE is the BUILT_IN_* code of the builtin. */
tree
-fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
- tree len, tree size, tree maxlen)
+fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
+ tree len, tree size, tree maxlen, bool ignore,
+ enum built_in_function fcode)
{
tree fn;
|| !validate_arg (size, INTEGER_TYPE))
return NULL_TREE;
+ if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
+ {
+ /* If return value of __stpncpy_chk is ignored,
+ optimize into __strncpy_chk. */
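+ /* (stpncpy returns a pointer into DEST rather than DEST itself, so
+ with the result unused the two calls have identical effects.) */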
+ fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
+ if (fn)
+ return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
+ }
+
if (! host_integerp (size, 1))
return NULL_TREE;
return NULL_TREE;
}
- /* If __builtin_strncpy_chk is used, assume strncpy is available. */
- fn = builtin_decl_explicit (BUILT_IN_STRNCPY);
+ /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
+ fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
+ ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
if (!fn)
return NULL_TREE;