+2011-05-04 Uros Bizjak <ubizjak@gmail.com>
+
+ PR target/48860
+ * config/i386/i386.md (*movdi_internal_rex64): Use %vmovd
+ for reg<->xmm moves.
+ * config/i386/sse.md (*vec_concatv2di_rex64_sse4_1): Ditto.
+ (*vec_concatv2di_rex64_sse): Ditto.
+ (*sse2_storeq_rex64): Do not emit %v prefix for mov{q} mnemonic.
+ (*vec_extractv2di_1_rex64): Ditto.
+
+ Revert:
+ 2011-05-02 Uros Bizjak <ubizjak@gmail.com>
+
+ * config/i386/mmx.md (*mov<mode>_internal_rex64): Use %vmovq for
+ reg<->xmm moves.
+ (*movv2sf_internal_rex64): Use %vmovq for reg<->xmm moves.
+
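The whole fix turns on one encoding detail: for a 64-bit move between a general register and an XMM register, the movd and movq mnemonics assemble to the same instruction (movd simply gains a REX.W prefix), but the broken assemblers this patch works around accept only the movd spelling. A minimal sketch of the affected move, assuming GAS syntax (the test function is illustrative, not part of the patch):

#include <stdint.h>
#include <stdio.h>

/* Round-trip a 64-bit value through %xmm0 using the movd spelling,
   which the assemblers targeted by this patch require; GAS accepts
   both movd and movq here and emits the same opcode either way.  */
static uint64_t
xmm_round_trip (uint64_t x)
{
  uint64_t r;
  __asm__ ("movd %1, %%xmm0\n\t"   /* GPR -> XMM */
           "movd %%xmm0, %0"       /* XMM -> GPR */
           : "=r" (r)
           : "r" (x)
           : "xmm0");
  return r;
}

int
main (void)
{
  printf ("%llx\n", (unsigned long long) xmm_round_trip (0xdeadbeefcafef00dULL));
  return 0;
}

Under -mavx the templates emit the v-prefixed form through the %v escape, which is why the patterns below return %vmovd rather than a bare movd.
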
2011-05-04 Richard Guenther <rguenther@suse.de>
* tree.h (int_const_binop): Remove notrunc argument.
* config/mips/mips.h (enum mips_code_readable_setting): Move to
mips-opts.h.
(mips_abi, mips_code_readable): Don't declare.
- * config/mips/mips.opt (config/mips/mips-opts.h): New
- HeaderInclude.
+ * config/mips/mips.opt (config/mips/mips-opts.h): New HeaderInclude.
(mabi=): Use Enum and Var.
(mips_abi): New Enum and EnumValue entries.
(mcode-readable=): Use Enum and Var.
case TYPE_SSEMOV:
if (get_attr_mode (insn) == MODE_TI)
return "%vmovdqa\t{%1, %0|%0, %1}";
+ /* Handle broken assemblers that require movd instead of movq.  */
+ if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+ return "%vmovd\t{%1, %0|%0, %1}";
return "%vmovq\t{%1, %0|%0, %1}";
case TYPE_MMXMOV:
- /* Moves from and into integer register is done using movd
- opcode with REX prefix. */
+ /* Handle broken assemblers that require movd instead of movq.  */
if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
return "movd\t{%1, %0|%0, %1}";
return "movq\t{%1, %0|%0, %1}";
case 11:
case 12:
- return "%vmovd\t{%1, %0|%0, %1}";
+ /* Handle broken assemblers that require movd instead of movq.  */
+ return "%vmovd\t{%1, %0|%0, %1}";
default:
gcc_unreachable ();
case 9: case 10: case 14: case 15:
return "movd\t{%1, %0|%0, %1}";
- case 12: case 13:
- return "%vmovd\t{%1, %0|%0, %1}";
case 11:
return "movq\t{%1, %0|%0, %1}";
+ case 12: case 13:
+ return "%vmovd\t{%1, %0|%0, %1}";
+
default:
gcc_unreachable ();
}
DONE;
})
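Both output fragments above make the same decision for DImode SSE moves and for MMX moves. Distilled into a hedged sketch (the helper name dimode_move_template and its boolean parameters are hypothetical, not GCC code):

#include <stdbool.h>

/* Hypothetical distillation of the template choice above: keep the
   movq spelling for xmm<->xmm and xmm<->memory moves, but switch to
   the movd spelling whenever a general register is involved, since
   some assemblers only accept movd there.  */
static const char *
dimode_move_template (bool gpr_operand, bool avx)
{
  if (gpr_operand)
    return avx ? "vmovd\t{%1, %0|%0, %1}" : "movd\t{%1, %0|%0, %1}";
  return avx ? "vmovq\t{%1, %0|%0, %1}" : "movq\t{%1, %0|%0, %1}";
}

In the real templates the AVX variant comes from the %v escape rather than an explicit flag.
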
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*mov<mode>_internal_rex64"
[(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
"=rm,r,!?y,!y,!?y,m ,!y ,*Y2,x,x ,m,r ,Yi")
%vpxor\t%0, %d0
%vmovq\t{%1, %0|%0, %1}
%vmovq\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}"
+ %vmovd\t{%1, %0|%0, %1}
+ %vmovd\t{%1, %0|%0, %1}"
[(set_attr "type" "imov,imov,mmx,mmxmov,mmxmov,mmxmov,ssecvt,ssecvt,sselog1,ssemov,ssemov,ssemov,ssemov")
(set_attr "unit" "*,*,*,*,*,*,mmx,mmx,*,*,*,*,*")
(set_attr "prefix_rep" "*,*,*,*,*,*,1,1,*,1,*,*,*")
DONE;
})
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*movv2sf_internal_rex64"
[(set (match_operand:V2SF 0 "nonimmediate_operand"
"=rm,r,!?y,!y,!?y,m ,!y ,*Y2,x,x,x,m,r ,Yi")
%vmovaps\t{%1, %0|%0, %1}
%vmovlps\t{%1, %d0|%d0, %1}
%vmovlps\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}"
+ %vmovd\t{%1, %0|%0, %1}
+ %vmovd\t{%1, %0|%0, %1}"
[(set_attr "type" "imov,imov,mmx,mmxmov,mmxmov,mmxmov,ssecvt,ssecvt,ssemov,sselog1,ssemov,ssemov,ssemov,ssemov")
(set_attr "unit" "*,*,*,*,*,*,mmx,mmx,*,*,*,*,*,*")
(set_attr "prefix_rep" "*,*,*,*,*,*,1,1,*,*,*,*,*,*")
"@
#
#
- %vmov{q}\t{%1, %0|%0, %1}"
+ mov{q}\t{%1, %0|%0, %1}"
[(set_attr "type" "*,*,imov")
- (set_attr "prefix" "*,*,maybe_vex")
(set_attr "mode" "*,*,DI")])
(define_insn "*sse2_storeq"
psrldq\t{$8, %0|%0, 8}
vpsrldq\t{$8, %1, %0|%0, %1, 8}
%vmovq\t{%H1, %0|%0, %H1}
- %vmov{q}\t{%H1, %0|%0, %H1}"
+ mov{q}\t{%H1, %0|%0, %H1}"
[(set_attr "isa" "base,noavx,avx,base,base")
(set_attr "type" "ssemov,sseishft1,sseishft1,ssemov,imov")
(set_attr "length_immediate" "*,1,1,*,*")
(set_attr "memory" "*,none,none,*,*")
- (set_attr "prefix" "maybe_vex,orig,vex,maybe_vex,maybe_vex")
+ (set_attr "prefix" "maybe_vex,orig,vex,maybe_vex,orig")
(set_attr "mode" "V2SF,TI,TI,TI,DI")])
(define_insn "*vec_extractv2di_1_sse2"
[(set_attr "type" "sselog,ssemov,ssemov")
(set_attr "mode" "TI,V4SF,V2SF")])
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*vec_concatv2di_rex64_sse4_1"
[(set (match_operand:V2DI 0 "register_operand"
"=x, x, x,Yi,!x,x,x,x,x")
pinsrq\t{$0x1, %2, %0|%0, %2, 0x1}
vpinsrq\t{$0x1, %2, %1, %0|%0, %1, %2, 0x1}
%vmovq\t{%1, %0|%0, %1}
- %vmovq\t{%1, %0|%0, %1}
+ %vmovd\t{%1, %0|%0, %1}
movq2dq\t{%1, %0|%0, %1}
punpcklqdq\t{%2, %0|%0, %2}
vpunpcklqdq\t{%2, %1, %0|%0, %1, %2}
(set_attr "prefix" "orig,vex,maybe_vex,maybe_vex,orig,orig,vex,orig,vex")
(set_attr "mode" "TI,TI,TI,TI,TI,TI,TI,V2SF,V2SF")])
+;; movd instead of movq is required to handle broken assemblers.
(define_insn "*vec_concatv2di_rex64_sse"
[(set (match_operand:V2DI 0 "register_operand" "=Y2,Yi,!Y2,Y2,x,x")
(vec_concat:V2DI
"TARGET_64BIT && TARGET_SSE"
"@
movq\t{%1, %0|%0, %1}
- movq\t{%1, %0|%0, %1}
+ movd\t{%1, %0|%0, %1}
movq2dq\t{%1, %0|%0, %1}
punpcklqdq\t{%2, %0|%0, %2}
movlhps\t{%2, %0|%0, %2}