+Thu Jun 14 12:44:15 CEST 2001  Jan Hubicka  <jh@suse.cz>
+
+ * i386.c (x86_accumulate_outgoing_args, x86_prologue_using_move,
+ x86_epilogue_using_move): New global variables.
+ (override_options): Enable ACCUMULATE_OUTGOING_ARGS if preferred.
+ (ix86_emit_save_regs_using_mov): New static function.
+ (ix86_expand_prologue, ix86_expand_epilogue): Use moves if preferred.
+ * i386.h (MASK_MMX, MASK_SSE, MASK_SSE2, MASK_128BIT_LONG_DOUBLE,
+ MASK_MIX_SSE_I387): Renumber.
+ (MASK_NO_ACCUMULATE_OUTGOING_ARGS): New.
+ (x86_accumulate_outgoing_args, x86_prologue_using_move,
+ x86_epilogue_using_move): Declare.
+ (TARGET_PROLOGUE_USING_MOVE, TARGET_EPILOGUE_USING_MOVE): New.
+
2001-06-13  John David Anglin  <dave@hiauly1.hia.nrc.ca>

	* inclhack.def (hpux10_cpp_pow_inline): New hack.
const int x86_integer_DFmode_moves = ~(m_ATHLON | m_PENT4);
const int x86_partial_reg_dependency = m_ATHLON | m_PENT4;
const int x86_memory_mismatch_stall = m_ATHLON | m_PENT4;
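+/* Tuning masks: CPUs where it pays to accumulate outgoing arguments and
+   to save/restore registers with mov rather than push/pop.  */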
+const int x86_accumulate_outgoing_args = m_ATHLON | m_PENT4 | m_PPRO;
+const int x86_prologue_using_move = m_ATHLON | m_PENT4 | m_PPRO;
+const int x86_epilogue_using_move = m_ATHLON | m_PENT4 | m_PPRO;
#define AT_BP(mode) (gen_rtx_MEM ((mode), hard_frame_pointer_rtx))
static int ix86_safe_length_prefix PARAMS ((rtx));
static int ix86_nsaved_regs PARAMS((void));
static void ix86_emit_save_regs PARAMS((void));
+static void ix86_emit_save_regs_using_mov PARAMS ((rtx, HOST_WIDE_INT));
static void ix86_emit_restore_regs_using_mov PARAMS ((rtx, int, int));
static void ix86_set_move_mem_attrs_1 PARAMS ((rtx, rtx, rtx, rtx, rtx));
static void ix86_sched_reorder_pentium PARAMS((rtx *, rtx *));
on by -msse. */
if (TARGET_SSE)
target_flags |= MASK_MMX;
+
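+  /* Enable ACCUMULATE_OUTGOING_ARGS where the tuning tables prefer it,
+     unless the user explicitly asked for the opposite or we optimize
+     for size.  */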
+ if ((x86_accumulate_outgoing_args & CPUMASK)
+ && !(target_flags & MASK_NO_ACCUMULATE_OUTGOING_ARGS)
+ && !optimize_size)
+ target_flags |= MASK_ACCUMULATE_OUTGOING_ARGS;
}
\f
void
}
}
+/* Emit code to save registers using MOV insns.  The first register
+   is saved at POINTER + OFFSET.  */
+static void
+ix86_emit_save_regs_using_mov (pointer, offset)
+ rtx pointer;
+ HOST_WIDE_INT offset;
+{
+ int regno;
+ rtx insn;
+
+ for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
+ if (ix86_save_reg (regno, true))
+ {
+ insn = emit_move_insn (adj_offsettable_operand (gen_rtx_MEM (Pmode,
+ pointer),
+ offset),
+ gen_rtx_REG (Pmode, regno));
+ RTX_FRAME_RELATED_P (insn) = 1;
+ offset += UNITS_PER_WORD;
+ }
+}
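+/* For illustration (hypothetical numbers, not part of the patch): a
+   function saving %ebx and %esi through the stack pointer with OFFSET 16
+   gets roughly
+	movl %ebx, 16(%esp)
+	movl %esi, 20(%esp)
+   with each store marked frame related for the unwind info.  */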
+
/* Expand the prologue into a bunch of separate insns. */
void
|| current_function_uses_const_pool)
&& !TARGET_64BIT);
struct ix86_frame frame;
+ int use_mov = (TARGET_PROLOGUE_USING_MOVE && !optimize_size);
+ HOST_WIDE_INT allocate;
ix86_compute_frame_layout (&frame);
RTX_FRAME_RELATED_P (insn) = 1;
}
- ix86_emit_save_regs ();
+ allocate = frame.to_allocate;
+  /* When we are saving only a single register and the frame is empty,
+     a push is equivalent to the mov+add sequence.  */
+ if (allocate == 0 && frame.nregs <= 1)
+ use_mov = 0;
- if (frame.to_allocate == 0)
+ if (!use_mov)
+ ix86_emit_save_regs ();
+ else
+ allocate += frame.nregs * UNITS_PER_WORD;
+
+ if (allocate == 0)
;
-  else if (! TARGET_STACK_PROBE || frame.to_allocate < CHECK_STACK_LIMIT)
+  else if (! TARGET_STACK_PROBE || allocate < CHECK_STACK_LIMIT)
{
abort();
arg0 = gen_rtx_REG (SImode, 0);
- emit_move_insn (arg0, GEN_INT (frame.to_allocate));
+ emit_move_insn (arg0, GEN_INT (allocate));
sym = gen_rtx_MEM (FUNCTION_MODE,
gen_rtx_SYMBOL_REF (Pmode, "_alloca"));
= gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_USE (VOIDmode, arg0),
CALL_INSN_FUNCTION_USAGE (insn));
}
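+  /* Hypothetical illustration: with two saved registers and 16 bytes of
+     locals (no frame pointer), the two strategies emit roughly
+	pushl %ebx ; pushl %esi ; subl $16, %esp	(push-based)
+	subl $24, %esp ; movl %ebx, 16(%esp) ; movl %esi, 20(%esp)
+     The mov-based stores do not depend on each other through %esp, which
+     schedules better on PPro, Athlon and Pentium 4.  */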
+ if (use_mov)
+ {
+ if (!frame_pointer_needed || !frame.to_allocate)
+ ix86_emit_save_regs_using_mov (stack_pointer_rtx, frame.to_allocate);
+ else
+ ix86_emit_save_regs_using_mov (hard_frame_pointer_rtx,
+ -frame.nregs * UNITS_PER_WORD);
+ }
#ifdef SUBTARGET_PROLOGUE
SUBTARGET_PROLOGUE;
emit_insn (gen_blockage ());
}
-
/* Emit code to restore saved registers using MOV insns. First register
is restored from POINTER + OFFSET. */
static void
   and there is exactly one register to pop.  This heuristic may need some
   tuning in the future.  */
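+  /* Hypothetical illustration (two saved registers, 16 bytes of locals,
+     no frame pointer): the mov-based epilogue emits
+	movl 16(%esp), %ebx ; movl 20(%esp), %esi ; addl $24, %esp ; ret
+     where the loads are independent and a single add retires the frame,
+     as opposed to addl $16, %esp ; popl %esi ; popl %ebx ; ret.  */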
if ((!sp_valid && frame.nregs <= 1)
+ || (TARGET_EPILOGUE_USING_MOVE && !optimize_size
+ && (frame.nregs > 1 || frame.to_allocate))
|| (frame_pointer_needed && !frame.nregs && frame.to_allocate)
|| (frame_pointer_needed && TARGET_USE_LEAVE && !optimize_size
&& frame.nregs == 1)
#define MASK_INLINE_ALL_STROPS 0x00002000 /* Inline stringops in all cases */
#define MASK_NO_PUSH_ARGS 0x00004000 /* Don't use push instructions */
#define MASK_ACCUMULATE_OUTGOING_ARGS 0x00008000 /* Accumulate outgoing args */
-#define MASK_MMX 0x00010000 /* Support MMX regs/builtins */
-#define MASK_SSE 0x00020000 /* Support SSE regs/builtins */
-#define MASK_SSE2 0x00040000 /* Support SSE2 regs/builtins */
-#define MASK_128BIT_LONG_DOUBLE 0x00080000 /* long double size is 128bit */
-#define MASK_MIX_SSE_I387 0x00100000 /* Mix SSE and i387 instructions */
-#define MASK_64BIT 0x00200000 /* Produce 64bit code */
-#define MASK_NO_RED_ZONE 0x00400000 /* Do not use red zone */
+#define MASK_NO_ACCUMULATE_OUTGOING_ARGS 0x00010000 /* Don't accumulate outgoing args */
+#define MASK_MMX 0x00020000 /* Support MMX regs/builtins */
+#define MASK_SSE 0x00040000 /* Support SSE regs/builtins */
+#define MASK_SSE2 0x00080000 /* Support SSE2 regs/builtins */
+#define MASK_128BIT_LONG_DOUBLE 0x00100000 /* long double size is 128bit */
+#define MASK_MIX_SSE_I387 0x00200000 /* Mix SSE and i387 instructions */
+#define MASK_64BIT 0x00400000 /* Produce 64bit code */
+#define MASK_NO_RED_ZONE 0x00800000 /* Do not use red zone */
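+/* MASK_NO_ACCUMULATE_OUTGOING_ARGS records an explicit user veto, letting
+   override_options tell "off by default" from an explicit
+   -mno-accumulate-outgoing-args; presumably the TARGET_SWITCHES table
+   grows a matching negative entry, e.g. (hypothetical, not in this hunk):
+     { "no-accumulate-outgoing-args", MASK_NO_ACCUMULATE_OUTGOING_ARGS,
+       "Do not accumulate outgoing args" },
+   The masks above MASK_ACCUMULATE_OUTGOING_ARGS each move up one bit so
+   the new flag can take 0x00010000 without overlapping.  */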
/* Temporary codegen switches */
#define MASK_INTEL_SYNTAX 0x00000200
extern const int x86_promote_hi_regs, x86_integer_DFmode_moves;
extern const int x86_add_esp_4, x86_add_esp_8, x86_sub_esp_4, x86_sub_esp_8;
extern const int x86_partial_reg_dependency, x86_memory_mismatch_stall;
+extern const int x86_accumulate_outgoing_args, x86_prologue_using_move;
+extern const int x86_epilogue_using_move;
#define TARGET_USE_LEAVE (x86_use_leave & CPUMASK)
#define TARGET_PUSH_MEMORY (x86_push_memory & CPUMASK)
#define TARGET_INTEGER_DFMODE_MOVES (x86_integer_DFmode_moves & CPUMASK)
#define TARGET_PARTIAL_REG_DEPENDENCY (x86_partial_reg_dependency & CPUMASK)
#define TARGET_MEMORY_MISMATCH_STALL (x86_memory_mismatch_stall & CPUMASK)
+#define TARGET_PROLOGUE_USING_MOVE (x86_prologue_using_move & CPUMASK)
+#define TARGET_EPILOGUE_USING_MOVE (x86_epilogue_using_move & CPUMASK)
#define TARGET_STACK_PROBE (target_flags & MASK_STACK_PROBE)