/* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
we failed and the caller should emit a normal call, otherwise try to get
the result in TARGET, if convenient. */
+
static rtx
expand_builtin_alloca (arglist, target)
tree arglist;
rtx target;
{
rtx op0;
+ rtx result;
/* Exactly one INTEGER_TYPE argument is required; anything else means
   we punt and let the caller emit an ordinary library call. */
if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
return 0;
/* Compute the requested allocation size.  */
op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
/* Allocate the desired space. */
- return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
+ result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
+
+/* NOTE(review): allocate_dynamic_stack_space returns an address in Pmode;
+   on targets where pointers are extended unsigned (Pmode wider than
+   ptr_mode), narrow the result back to ptr_mode so it matches the type
+   callers expect from __builtin_alloca.  */
+#ifdef POINTERS_EXTEND_UNSIGNED
+ result = convert_memory_address (ptr_mode, result);
+#endif
+
+ return result;
}
/* Expand a call to the ffs builtin. The arguments are in ARGLIST.
Return 0 if a normal call should be emitted rather than expanding the
function in-line. If convenient, the result should be placed in TARGET.
SUBTARGET may be used as the target for computing one of EXP's operands. */
+
static rtx
expand_builtin_ffs (arglist, target, subtarget)
tree arglist;
/* If the string passed to fputs is a constant and is one character
long, we attempt to transform this call into __builtin_fputc(). */
+
static rtx
expand_builtin_fputs (arglist, ignore)
tree arglist;
#endif
if (MUST_ALIGN)
- {
- if (GET_CODE (size) == CONST_INT)
- size = GEN_INT (INTVAL (size)
- + (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
- else
- size = expand_binop (Pmode, add_optab, size,
- GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
- NULL_RTX, 1, OPTAB_LIB_WIDEN);
- }
+ size
+ = force_operand (plus_constant (size,
+ BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
+ NULL_RTX);
#ifdef SETJMP_VIA_SAVE_AREA
/* If setjmp restores regs from a save area in the stack frame,
#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
/* If anyone creates a target with these characteristics, let them
know that our optimization cannot work correctly in such a case. */
- abort();
+ abort ();
#endif
if (GET_CODE (size) == CONST_INT)
{
- int new = INTVAL (size) / align * align;
+ HOST_WIDE_INT new = INTVAL (size) / align * align;
if (INTVAL (size) != new)
setjmpless_size = GEN_INT (new);
if (flag_stack_check && ! STACK_CHECK_BUILTIN)
probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
- /* Don't use a TARGET that isn't a pseudo. */
+ /* Don't use a TARGET that isn't a pseudo or is the wrong mode. */
if (target == 0 || GET_CODE (target) != REG
- || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ || REGNO (target) < FIRST_PSEUDO_REGISTER
+ || GET_MODE (target) != Pmode)
target = gen_reg_rtx (Pmode);
mark_reg_pointer (target, known_align);
if (mode == VOIDmode)
mode = Pmode;
- size = convert_modes (mode, ptr_mode, size, 1);
pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
if (pred && ! ((*pred) (size, mode)))
size = copy_to_mode_reg (mode, size);
#ifndef STACK_GROWS_DOWNWARD
emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
- size = convert_modes (Pmode, ptr_mode, size, 1);
/* Check stack bounds if necessary. */
if (current_function_limit_stack)
REG_NOTES (note_target));
}
#endif /* SETJMP_VIA_SAVE_AREA */
+
#ifdef STACK_GROWS_DOWNWARD
emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif