(define_insn "*pushsi2"
[(set (match_operand:SI 0 "push_operand" "=<")
(match_operand:SI 1 "general_no_elim_operand" "ri*m"))]
- ""
+ "!TARGET_64BIT"
"push{l}\\t%1"
[(set_attr "type" "push")
(set_attr "mode" "SI")])
+;; For the 64bit ABI we always round up to 8 bytes.
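+;; An SImode push in 64-bit mode therefore comes out as, e.g., "pushq %rdi";
+;; only the low 32 bits of the slot are meaningful, but %rsp still drops by 8.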
+(define_insn "*pushsi2_rex64"
+ [(set (match_operand:SI 0 "push_operand" "=X")
+ (match_operand:SI 1 "nonmemory_no_elim_operand" "ri"))]
+ "TARGET_64BIT"
+ "push{q}\\t%q1"
+ [(set_attr "type" "push")
+ (set_attr "mode" "SI")])
+
(define_insn "*pushsi2_prologue"
[(set (match_operand:SI 0 "push_operand" "=<")
(match_operand:SI 1 "general_no_elim_operand" "ri*m"))
(set (reg:SI 6) (reg:SI 6))]
- ""
+ "!TARGET_64BIT"
"push{l}\\t%1"
[(set_attr "type" "push")
(set_attr "mode" "SI")])
(set_attr "modrm" "0,*,0,*,*,*")
(set_attr "mode" "SI")])
+;; Stores and loads of ax to arbitrary constant address.
+;; We fake a second form of the instruction to force reload to load the
+;; address into a register when rax is not available.
+(define_insn "*movabssi_1_rex64"
+ [(set (mem:SI (match_operand:DI 0 "x86_64_movabs_operand" "i,r,r"))
+ (match_operand:SI 1 "nonmemory_operand" "a,er,i"))]
+ "TARGET_64BIT"
+ "@
+ movabs{l}\\t{%1, %P0|%P0, %1}
+ mov{l}\\t{%1, %a0|%a0, %1}
+ movabs{l}\\t{%1, %a0|%a0, %1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*,*")
+ (set_attr "length_address" "8,0,0")
+ (set_attr "length_immediate" "0,*,*")
+ (set_attr "memory" "store")
+ (set_attr "mode" "SI")])
+
+(define_insn "*movabssi_2_rex64"
+ [(set (match_operand:SI 0 "register_operand" "=a,r")
+ (mem:SI (match_operand:DI 1 "x86_64_movabs_operand" "i,r")))]
+ "TARGET_64BIT"
+ "@
+ movabs{l}\\t{%P1, %0|%0, %P1}
+ mov{l}\\t{%a1, %0|%0, %a1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*")
+ (set_attr "length_address" "8,0")
+ (set_attr "length_immediate" "0")
+ (set_attr "memory" "load")
+ (set_attr "mode" "SI")])
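+;; Roughly, a store to a 64-bit constant address comes out as (illustration only):
+;;	movabs	%eax, 0x1122334455667788	; value already in %eax
+;; or, when %rax is not available, after reload of the address:
+;;	movabsq	$0x1122334455667788, %rcx
+;;	movl	%edx, (%rcx)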
+
(define_insn "*swapsi"
[(set (match_operand:SI 0 "register_operand" "+r")
(match_operand:SI 1 "register_operand" "+r"))
(const_string "HI")))
(set_attr "modrm" "0,*,*,0,*,*")])
+;; Stores and loads of ax to arbitrary constant address.
+;; We fake a second form of the instruction to force reload to load the
+;; address into a register when rax is not available.
+(define_insn "*movabshi_1_rex64"
+ [(set (mem:HI (match_operand:DI 0 "x86_64_movabs_operand" "i,r,r"))
+ (match_operand:HI 1 "nonmemory_operand" "a,er,i"))]
+ "TARGET_64BIT"
+ "@
+ movabs{w}\\t{%1, %P0|%P0, %1}
+ mov{w}\\t{%1, %a0|%a0, %1}
+ movabs{w}\\t{%1, %a0|%a0, %1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*,*")
+ (set_attr "length_address" "8,0,0")
+ (set_attr "length_immediate" "0,*,*")
+ (set_attr "memory" "store")
+ (set_attr "mode" "HI")])
+
+(define_insn "*movabshi_2_rex64"
+ [(set (match_operand:HI 0 "register_operand" "=a,r")
+ (mem:HI (match_operand:DI 1 "x86_64_movabs_operand" "i,r")))]
+ "TARGET_64BIT"
+ "@
+ movabs{w}\\t{%P1, %0|%0, %P1}
+ mov{w}\\t{%a1, %0|%0, %a1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*")
+ (set_attr "length_address" "8,0")
+ (set_attr "length_immediate" "0")
+ (set_attr "memory" "load")
+ (set_attr "mode" "HI")])
+
(define_insn "*swaphi_1"
[(set (match_operand:HI 0 "register_operand" "+r")
(match_operand:HI 1 "register_operand" "+r"))
(set_attr "mode" "SI")])
(define_insn "*movqi_extv_1"
- [(set (match_operand:QI 0 "nonimmediate_operand" "=qm,?r")
- (sign_extract:QI (match_operand:SI 1 "register_operand" "q,q")
+ [(set (match_operand:QI 0 "nonimmediate_operand" "=Qm,?r")
+ (sign_extract:QI (match_operand:SI 1 "ext_register_operand" "Q,Q")
(const_int 8)
(const_int 8)))]
- ""
+ "!TARGET_64BIT"
"*
{
switch (get_attr_type (insn))
(const_string "SI")
(const_string "QI")))])
+(define_insn "*movqi_extv_1_rex64"
+ [(set (match_operand:QI 0 "register_operand" "=Q,?R")
+ (sign_extract:QI (match_operand:SI 1 "ext_register_operand" "Q,Q")
+ (const_int 8)
+ (const_int 8)))]
+ "TARGET_64BIT"
+ "*
+{
+ switch (get_attr_type (insn))
+ {
+ case TYPE_IMOVX:
+ return \"movs{bl|x}\\t{%h1, %k0|%k0, %h1}\";
+ default:
+ return \"mov{b}\\t{%h1, %0|%0, %h1}\";
+ }
+}"
+ [(set (attr "type")
+ (if_then_else (and (match_operand:QI 0 "register_operand" "")
+ (ior (not (match_operand:QI 0 "q_regs_operand" ""))
+ (ne (symbol_ref "TARGET_MOVX")
+ (const_int 0))))
+ (const_string "imovx")
+ (const_string "imov")))
+ (set (attr "mode")
+ (if_then_else (eq_attr "type" "imovx")
+ (const_string "SI")
+ (const_string "QI")))])
+
+;; Stores and loads of ax to arbitrary constant address.
+;; We fake a second form of the instruction to force reload to load the
+;; address into a register when rax is not available.
+(define_insn "*movabsqi_1_rex64"
+ [(set (mem:QI (match_operand:DI 0 "x86_64_movabs_operand" "i,r,r"))
+ (match_operand:QI 1 "nonmemory_operand" "a,er,i"))]
+ "TARGET_64BIT"
+ "@
+   movabs{b}\\t{%1, %P0|%P0, %1}
+   mov{b}\\t{%1, %a0|%a0, %1}
+   movabs{b}\\t{%1, %a0|%a0, %1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*,*")
+ (set_attr "length_address" "8,0,0")
+ (set_attr "length_immediate" "0,*,*")
+ (set_attr "memory" "store")
+ (set_attr "mode" "QI")])
+
+(define_insn "*movabsqi_2_rex64"
+ [(set (match_operand:QI 0 "register_operand" "=a,r")
+ (mem:QI (match_operand:DI 1 "x86_64_movabs_operand" "i,r")))]
+ "TARGET_64BIT"
+ "@
+   movabs{b}\\t{%P1, %0|%0, %P1}
+   mov{b}\\t{%a1, %0|%0, %a1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*")
+ (set_attr "length_address" "8,0")
+ (set_attr "length_immediate" "0")
+ (set_attr "memory" "load")
+ (set_attr "mode" "QI")])
+
(define_insn "*movsi_extzv_1"
[(set (match_operand:SI 0 "register_operand" "=R")
(zero_extract:SI (match_operand 1 "ext_register_operand" "Q")
"!TARGET_64BIT"
"#")
+(define_insn "pushdi2_rex64"
+ [(set (match_operand:DI 0 "push_operand" "=<,!<")
+ (match_operand:DI 1 "general_no_elim_operand" "re*m,n"))]
+ "TARGET_64BIT"
+ "@
+ push{q}\\t%1
+ #"
+ [(set_attr "type" "push,multi")
+ (set_attr "mode" "DI")])
+
+;; Convert impossible pushes of immediate to existing instructions.
+;; First try to get a scratch register and go through it.  In case this
+;; fails, push the sign-extended lower part first and then overwrite the
+;; upper part with a 32bit move.
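+;; Roughly, pushing e.g. $0x1234567800000001 becomes (illustration only):
+;;	movabsq	$0x1234567800000001, %rcx	; with a scratch register
+;;	pushq	%rcx
+;; or, with no scratch register available:
+;;	pushq	$0x1			; sign-extended low 32 bits
+;;	movl	$0x12345678, 4(%rsp)	; overwrite the upper 32 bits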
+(define_peephole2
+ [(match_scratch:DI 2 "r")
+ (set (match_operand:DI 0 "push_operand" "")
+ (match_operand:DI 1 "immediate_operand" ""))]
+ "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
+ && !x86_64_immediate_operand (operands[1], DImode)"
+ [(set (match_dup 2) (match_dup 1))
+ (set (match_dup 0) (match_dup 2))]
+ "")
+
+;; We need to define this as both a peephole and a splitter for the case
+;; the peephole2 pass is not run.
+(define_peephole2
+ [(set (match_operand:DI 0 "push_operand" "")
+ (match_operand:DI 1 "immediate_operand" ""))]
+ "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
+ && !x86_64_immediate_operand (operands[1], DImode) && 1"
+ [(set (match_dup 0) (match_dup 1))
+ (set (match_dup 2) (match_dup 3))]
+ "split_di (operands + 1, 1, operands + 2, operands + 3);
+ operands[1] = gen_lowpart (DImode, operands[2]);
+ operands[2] = gen_rtx_MEM (SImode, gen_rtx_PLUS (DImode, stack_pointer_rtx,
+ GEN_INT (4)));
+ ")
+
+(define_split
+ [(set (match_operand:DI 0 "push_operand" "")
+ (match_operand:DI 1 "immediate_operand" ""))]
+ "TARGET_64BIT && (flow2_completed || (reload_completed && !flag_peephole2))
+ && !symbolic_operand (operands[1], DImode)
+ && !x86_64_immediate_operand (operands[1], DImode)"
+ [(set (match_dup 0) (match_dup 1))
+ (set (match_dup 2) (match_dup 3))]
+ "split_di (operands + 1, 1, operands + 2, operands + 3);
+ operands[1] = gen_lowpart (DImode, operands[2]);
+ operands[2] = gen_rtx_MEM (SImode, gen_rtx_PLUS (DImode, stack_pointer_rtx,
+ GEN_INT (4)));
+ ")
+
+(define_insn "*pushdi2_prologue_rex64"
+ [(set (match_operand:DI 0 "push_operand" "=<")
+ (match_operand:DI 1 "general_no_elim_operand" "re*m"))
+ (set (reg:DI 6) (reg:DI 6))]
+ "TARGET_64BIT"
+ "push{q}\\t%1"
+ [(set_attr "type" "push")
+ (set_attr "mode" "DI")])
+
+(define_insn "*popdi1_epilogue_rex64"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r*m")
+ (mem:DI (reg:DI 7)))
+ (set (reg:DI 7)
+ (plus:DI (reg:DI 7) (const_int 8)))
+ (set (reg:DI 6) (reg:DI 6))]
+ "TARGET_64BIT"
+ "pop{q}\\t%0"
+ [(set_attr "type" "pop")
+ (set_attr "mode" "DI")])
+
+(define_insn "popdi1"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r*m")
+ (mem:DI (reg:DI 7)))
+ (set (reg:DI 7)
+ (plus:DI (reg:DI 7) (const_int 8)))]
+ "TARGET_64BIT"
+ "pop{q}\\t%0"
+ [(set_attr "type" "pop")
+ (set_attr "mode" "DI")])
+
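+;; Note: a 32-bit xor zero-extends into the upper half of the register, so
+;; "xorl %eax, %eax" below clears all 64 bits of %rax.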
+(define_insn "*movdi_xor_rex64"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (match_operand:DI 1 "const0_operand" "i"))
+ (clobber (reg:CC 17))]
+ "reload_completed && (!TARGET_USE_MOV0 || optimize_size)
+ && TARGET_64BIT"
+ "xor{l}\\t{%k0, %k0|%k0, %k0}"
+ [(set_attr "type" "alu1")
+ (set_attr "mode" "SI")
+ (set_attr "length_immediate" "0")])
+
+(define_insn "*movdi_or_rex64"
+ [(set (match_operand:DI 0 "register_operand" "=r")
+ (match_operand:DI 1 "const_int_operand" "i"))
+ (clobber (reg:CC 17))]
+ "reload_completed && GET_CODE (operands[1]) == CONST_INT
+ && TARGET_64BIT
+ && INTVAL (operands[1]) == -1
+ && (TARGET_PENTIUM || optimize_size)"
+ "*
+{
+ operands[1] = constm1_rtx;
+ return \"or{q}\\t{%1, %0|%0, %1}\";
+}"
+ [(set_attr "type" "alu1")
+ (set_attr "mode" "DI")
+ (set_attr "length_immediate" "1")])
+
(define_insn "*movdi_2"
[(set (match_operand:DI 0 "nonimmediate_operand" "=r,o,!m*y,!*y")
(match_operand:DI 1 "general_operand" "riFo,riF,*y,m"))]
(define_split
[(set (match_operand:DI 0 "push_operand" "")
(match_operand:DI 1 "general_operand" ""))]
- "reload_completed && ! MMX_REG_P (operands[1]) && !TARGET_64BIT"
+ "!TARGET_64BIT && reload_completed && ! MMX_REG_P (operands[1])"
[(const_int 0)]
"if (!ix86_split_long_move (operands)) abort (); DONE;")
(define_split
[(set (match_operand:DI 0 "nonimmediate_operand" "")
(match_operand:DI 1 "general_operand" ""))]
- "reload_completed && ! MMX_REG_P (operands[0]) && ! MMX_REG_P (operands[1])"
+ "!TARGET_64BIT && reload_completed && ! MMX_REG_P (operands[0])
+ && ! MMX_REG_P (operands[1])"
[(set (match_dup 2) (match_dup 5))
(set (match_dup 3) (match_dup 6))]
"if (ix86_split_long_move (operands)) DONE;")
+
+(define_insn "*movdi_1_rex64"
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=r,r,r,mr,!mr,!m*y,!*y,*m,*Y")
+ (match_operand:DI 1 "general_operand" "Z,rem,i,re,n,*y,m,*Y,*m"))]
+ "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ && TARGET_64BIT"
+ "*
+{
+ switch (get_attr_type (insn))
+ {
+ case TYPE_SSE:
+ case TYPE_MMX:
+ return \"movd\\t{%1, %0|%0, %1}\";
+ case TYPE_MULTI:
+ return \"#\";
+ case TYPE_LEA:
+ return \"lea{q}\\t{%a1, %0|%0, %a1}\";
+ default:
+ if (flag_pic && SYMBOLIC_CONST (operands[1]))
+ abort ();
+ if (get_attr_mode (insn) == MODE_SI)
+ return \"mov{l}\\t{%k1, %k0|%k0, %k1}\";
+ else if (which_alternative == 2)
+ return \"movabs{q}\\t{%1, %0|%0, %1}\";
+ else
+ return \"mov{q}\\t{%1, %0|%0, %1}\";
+ }
+}"
+ [(set (attr "type")
+ (cond [(eq_attr "alternative" "5,6")
+ (const_string "mmx")
+ (eq_attr "alternative" "7,8")
+ (const_string "sse")
+ (eq_attr "alternative" "4")
+ (const_string "multi")
+ (and (ne (symbol_ref "flag_pic") (const_int 0))
+ (match_operand:DI 1 "symbolic_operand" ""))
+ (const_string "lea")
+ ]
+ (const_string "imov")))
+ (set_attr "modrm" "*,0,0,*,*,*,*,*,*")
+ (set_attr "length_immediate" "*,4,8,*,*,*,*,*,*")
+ (set_attr "mode" "SI,DI,DI,DI,SI,DI,DI,DI,DI")])
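+;; The immediate alternatives above come out roughly as (illustration only):
+;;	movl	$1, %eax			; 'Z': zero-extending 32-bit move
+;;	movq	$-1, %rax			; 'e': sign-extended 32-bit immediate
+;;	movabsq	$0x1122334455667788, %rax	; 'i': full 64-bit immediate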
+
+;; Stores and loads of ax to arbitrary constant address.
+;; We fake a second form of the instruction to force reload to load the
+;; address into a register when rax is not available.
+(define_insn "*movabsdi_1_rex64"
+ [(set (mem:DI (match_operand:DI 0 "x86_64_movabs_operand" "i,r,r"))
+ (match_operand:DI 1 "nonmemory_operand" "a,er,i"))]
+ "TARGET_64BIT"
+ "@
+ movabs{q}\\t{%1, %P0|%P0, %1}
+ mov{q}\\t{%1, %a0|%a0, %1}
+ movabs{q}\\t{%1, %a0|%a0, %1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*,*")
+ (set_attr "length_address" "8,0,0")
+ (set_attr "length_immediate" "0,*,*")
+ (set_attr "memory" "store")
+ (set_attr "mode" "DI")])
+
+(define_insn "*movabsdi_2_rex64"
+ [(set (match_operand:DI 0 "register_operand" "=a,r")
+ (mem:DI (match_operand:DI 1 "x86_64_movabs_operand" "i,r")))]
+ "TARGET_64BIT"
+ "@
+ movabs{q}\\t{%P1, %0|%0, %P1}
+ mov{q}\\t{%a1, %0|%0, %a1}"
+ [(set_attr "type" "imov")
+ (set_attr "modrm" "0,*")
+ (set_attr "length_address" "8,0")
+ (set_attr "length_immediate" "0")
+ (set_attr "memory" "load")
+ (set_attr "mode" "DI")])
+
+;; Convert impossible stores of immediate to existing instructions.
+;; First try to get a scratch register and go through it.  In case this
+;; fails, move it by 32bit parts.
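+;; Roughly, storing e.g. $0x1234567800000001 becomes (illustration only):
+;;	movabsq	$0x1234567800000001, %rax	; with a scratch register
+;;	movq	%rax, (%rdi)
+;; or, with no scratch register available:
+;;	movl	$0x1, (%rdi)		; low 32 bits
+;;	movl	$0x12345678, 4(%rdi)	; high 32 bits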
+(define_peephole2
+ [(match_scratch:DI 2 "r")
+ (set (match_operand:DI 0 "memory_operand" "")
+ (match_operand:DI 1 "immediate_operand" ""))]
+ "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
+ && !x86_64_immediate_operand (operands[1], DImode)"
+ [(set (match_dup 2) (match_dup 1))
+ (set (match_dup 0) (match_dup 2))]
+ "")
+
+;; We need to define this as both a peephole and a splitter for the case
+;; the peephole2 pass is not run.
+(define_peephole2
+ [(set (match_operand:DI 0 "memory_operand" "")
+ (match_operand:DI 1 "immediate_operand" ""))]
+ "TARGET_64BIT && !symbolic_operand (operands[1], DImode)
+ && !x86_64_immediate_operand (operands[1], DImode) && 1"
+ [(set (match_dup 2) (match_dup 3))
+ (set (match_dup 4) (match_dup 5))]
+ "split_di (operands, 2, operands + 2, operands + 4);")
+
+(define_split
+ [(set (match_operand:DI 0 "memory_operand" "")
+ (match_operand:DI 1 "immediate_operand" ""))]
+ "TARGET_64BIT && (flow2_completed || (reload_completed && !flag_peephole2))
+ && !symbolic_operand (operands[1], DImode)
+ && !x86_64_immediate_operand (operands[1], DImode)"
+ [(set (match_dup 2) (match_dup 3))
+ (set (match_dup 4) (match_dup 5))]
+ "split_di (operands, 2, operands + 2, operands + 4);")
+
+(define_insn "*swapdi_rex64"
+ [(set (match_operand:DI 0 "register_operand" "+r")
+ (match_operand:DI 1 "register_operand" "+r"))
+ (set (match_dup 1)
+ (match_dup 0))]
+ "TARGET_64BIT"
+ "xchg{q}\\t%1, %0"
+ [(set_attr "type" "imov")
+ (set_attr "pent_pair" "np")
+ (set_attr "athlon_decode" "vector")
+ (set_attr "mode" "DI")
+ (set_attr "modrm" "0")
+ (set_attr "ppro_uops" "few")])
+
(define_expand "movsf"
[(set (match_operand:SF 0 "nonimmediate_operand" "")
(define_insn "*pushsf"
[(set (match_operand:SF 0 "push_operand" "=<,<,<")
(match_operand:SF 1 "general_no_elim_operand" "f#rx,rFm#fx,x#rf"))]
- ""
+ "!TARGET_64BIT"
"*
{
switch (which_alternative)
[(set_attr "type" "multi,push,multi")
(set_attr "mode" "SF,SI,SF")])
+(define_insn "*pushsf_rex64"
+ [(set (match_operand:SF 0 "push_operand" "=X,X,X")
+ (match_operand:SF 1 "nonmemory_no_elim_operand" "f#rx,rF#fx,x#rf"))]
+ "TARGET_64BIT"
+ "*
+{
+ switch (which_alternative)
+ {
+ case 0:
+      /* %%% We lose REG_DEAD notes for controlling pops if we split late.  */
+ operands[0] = gen_rtx_MEM (SFmode, stack_pointer_rtx);
+ operands[2] = stack_pointer_rtx;
+ operands[3] = GEN_INT (8);
+ if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
+ return \"sub{q}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ else
+ return \"sub{q}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
+
+ case 1:
+ return \"push{q}\\t%q1\";
+
+ case 2:
+ return \"#\";
+
+ default:
+ abort ();
+ }
+}"
+ [(set_attr "type" "multi,push,multi")
+ (set_attr "mode" "SF,DI,SF")])
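+;; Roughly, pushing an SFmode value held in an x87 register comes out as
+;; (illustration only):
+;;	subq	$8, %rsp
+;;	fstps	(%rsp)		; fsts instead when the value stays live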
+
(define_split
[(set (match_operand:SF 0 "push_operand" "")
(match_operand:SF 1 "memory_operand" ""))]
(define_split
[(set (match_operand:SF 0 "push_operand" "")
(match_operand:SF 1 "register_operand" ""))]
- "ANY_FP_REGNO_P (REGNO (operands[1]))"
+ "!TARGET_64BIT && ANY_FP_REGNO_P (REGNO (operands[1]))"
[(set (reg:SI 7) (plus:SI (reg:SI 7) (const_int -4)))
(set (mem:SF (reg:SI 7)) (match_dup 1))])
+(define_split
+ [(set (match_operand:SF 0 "push_operand" "")
+ (match_operand:SF 1 "register_operand" ""))]
+ "TARGET_64BIT && ANY_FP_REGNO_P (REGNO (operands[1]))"
+ [(set (reg:DI 7) (plus:DI (reg:DI 7) (const_int -8)))
+ (set (mem:SF (reg:DI 7)) (match_dup 1))])
+
(define_insn "*movsf_1"
[(set (match_operand:SF 0 "nonimmediate_operand" "=f#xr,m,f#xr,r#xf,m,x#rf,x#rf,x#rf,m")
(match_operand:SF 1 "general_operand" "fm#rx,f#rx,G,rmF#fx,Fr#fx,H,x,xm#rf,x#rf"))]
(define_insn "*pushdf_nointeger"
[(set (match_operand:DF 0 "push_operand" "=<,<,<,<")
(match_operand:DF 1 "general_no_elim_operand" "f#Y,Fo#fY,*r#fY,Y#f"))]
- "!TARGET_INTEGER_DFMODE_MOVES"
+ "!TARGET_64BIT && !TARGET_INTEGER_DFMODE_MOVES"
"*
{
switch (which_alternative)
(define_insn "*pushdf_integer"
[(set (match_operand:DF 0 "push_operand" "=<,<,<")
(match_operand:DF 1 "general_no_elim_operand" "f#rY,rFo#fY,Y#rf"))]
- "TARGET_INTEGER_DFMODE_MOVES"
+ "TARGET_64BIT || TARGET_INTEGER_DFMODE_MOVES"
"*
{
switch (which_alternative)
operands[0] = gen_rtx_MEM (DFmode, stack_pointer_rtx);
operands[2] = stack_pointer_rtx;
operands[3] = GEN_INT (8);
- if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
- return \"sub{l}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ if (TARGET_64BIT)
+ if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
+ return \"sub{q}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ else
+ return \"sub{q}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
else
- return \"sub{l}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
+ if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
+ return \"sub{l}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ else
+ return \"sub{l}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
+
case 1:
case 2:
(define_split
[(set (match_operand:DF 0 "push_operand" "")
(match_operand:DF 1 "register_operand" ""))]
- "reload_completed && ANY_FP_REGNO_P (REGNO (operands[1]))"
+ "!TARGET_64BIT && reload_completed && ANY_FP_REGNO_P (REGNO (operands[1]))"
[(set (reg:SI 7) (plus:SI (reg:SI 7) (const_int -8)))
(set (mem:DF (reg:SI 7)) (match_dup 1))]
"")
+(define_split
+ [(set (match_operand:DF 0 "push_operand" "")
+ (match_operand:DF 1 "register_operand" ""))]
+ "TARGET_64BIT && reload_completed && ANY_FP_REGNO_P (REGNO (operands[1]))"
+ [(set (reg:DI 7) (plus:DI (reg:DI 7) (const_int -8)))
+ (set (mem:DF (reg:DI 7)) (match_dup 1))]
+ "")
+
(define_split
[(set (match_operand:DF 0 "push_operand" "")
(match_operand:DF 1 "general_operand" ""))]
operands[0] = gen_rtx_MEM (XFmode, stack_pointer_rtx);
operands[2] = stack_pointer_rtx;
operands[3] = GEN_INT (16);
- if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
- return \"sub{l}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ if (TARGET_64BIT)
+ if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
+ return \"sub{q}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ else
+ return \"sub{q}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
else
- return \"sub{l}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
+ if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
+ return \"sub{l}\\t{%3, %2|%2, %3}\;fstp%z0\\t%y0\";
+ else
+ return \"sub{l}\\t{%3, %2|%2, %3}\;fst%z0\\t%y0\";
case 1:
return \"#\";
(define_split
[(set (match_operand:XF 0 "push_operand" "")
(match_operand:XF 1 "register_operand" ""))]
- "ANY_FP_REGNO_P (REGNO (operands[1]))"
+ "!TARGET_64BIT && ANY_FP_REGNO_P (REGNO (operands[1]))"
[(set (reg:SI 7) (plus:SI (reg:SI 7) (const_int -12)))
(set (mem:XF (reg:SI 7)) (match_dup 1))])
(define_split
[(set (match_operand:TF 0 "push_operand" "")
(match_operand:TF 1 "register_operand" ""))]
- "ANY_FP_REGNO_P (REGNO (operands[1]))"
+ "!TARGET_64BIT && ANY_FP_REGNO_P (REGNO (operands[1]))"
[(set (reg:SI 7) (plus:SI (reg:SI 7) (const_int -16)))
(set (mem:TF (reg:SI 7)) (match_dup 1))])
+(define_split
+ [(set (match_operand:TF 0 "push_operand" "")
+ (match_operand:TF 1 "register_operand" ""))]
+ "TARGET_64BIT && ANY_FP_REGNO_P (REGNO (operands[1]))"
+ [(set (reg:DI 7) (plus:DI (reg:DI 7) (const_int -16)))
+ (set (mem:TF (reg:DI 7)) (match_dup 1))])
+
;; Do not use integer registers when optimizing for size
(define_insn "*movxf_nointeger"
[(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,*r,o")