+2017-12-02  Jakub Jelinek  <jakub@redhat.com>
+
+ * config/i386/i386-protos.h (standard_sse_constant_opcode): Change
+ last argument to rtx pointer.
+ * config/i386/i386.c (standard_sse_constant_opcode): Replace X argument
+ with OPERANDS.  For AVX+ prefer 128-bit VEX encoded instructions over
+ 256-bit or 512-bit ones.  If the destination is an EXT_REX_SSE_REG_P
+ register, use an EVEX encoded insn depending on the chosen ISAs.
+ * config/i386/i386.md (*movxi_internal_avx512f, *movoi_internal_avx,
+ *movti_internal, *movdi_internal, *movsi_internal, *movtf_internal,
+ *movdf_internal, *movsf_internal): Adjust standard_sse_constant_opcode
+ callers.
+ * config/i386/sse.md (mov<mode>_internal): Likewise.
+ * config/i386/mmx.md (*mov<mode>_internal): Likewise.
+
2017-12-01  Segher Boessenkool  <segher@kernel.crashing.org>

	* doc/invoke.texi (-dp): Say that instruction cost is printed as well.
}
/* Return the opcode of the special instruction to be used to load
- the constant X. */
+ the constant operands[1] into operands[0]. */
const char *
-standard_sse_constant_opcode (rtx_insn *insn, rtx x)
+standard_sse_constant_opcode (rtx_insn *insn, rtx *operands)
{
machine_mode mode;
+ rtx x = operands[1];
gcc_assert (TARGET_SSE);
{
switch (get_attr_mode (insn))
{
+ case MODE_TI:
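+ /* For %xmm0-%xmm15 a 128-bit (v)pxor clears the whole register; when
+    VEX encoded it also zeroes the upper bits of the corresponding
+    %ymm/%zmm register.  */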
+ if (!EXT_REX_SSE_REG_P (operands[0]))
+ return "%vpxor\t%0, %d0";
+ /* FALLTHRU */
case MODE_XI:
- return "vpxord\t%g0, %g0, %g0";
case MODE_OI:
- return (TARGET_AVX512VL
- ? "vpxord\t%x0, %x0, %x0"
- : "vpxor\t%x0, %x0, %x0");
- case MODE_TI:
- return (TARGET_AVX512VL
- ? "vpxord\t%x0, %x0, %x0"
- : "%vpxor\t%0, %d0");
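+ /* %xmm16-%xmm31 are only accessible with EVEX encodings; a 128-bit
+    EVEX vpxord requires AVX512VL, otherwise clear the full %zmm
+    register.  */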
+ if (EXT_REX_SSE_REG_P (operands[0]))
+ return (TARGET_AVX512VL
+ ? "vpxord\t%x0, %x0, %x0"
+ : "vpxord\t%g0, %g0, %g0");
+ return "vpxor\t%x0, %x0, %x0";
+ case MODE_V2DF:
+ if (!EXT_REX_SSE_REG_P (operands[0]))
+ return "%vxorpd\t%0, %d0";
+ /* FALLTHRU */
case MODE_V8DF:
- return (TARGET_AVX512DQ
- ? "vxorpd\t%g0, %g0, %g0"
- : "vpxorq\t%g0, %g0, %g0");
case MODE_V4DF:
- return "vxorpd\t%x0, %x0, %x0";
- case MODE_V2DF:
- return "%vxorpd\t%0, %d0";
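+ /* EVEX encoded vxorpd requires AVX512DQ; without it fall back to the
+    integer vpxorq, and without AVX512VL clear the full %zmm register.  */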
+ if (!EXT_REX_SSE_REG_P (operands[0]))
+ return "vxorpd\t%x0, %x0, %x0";
+ else if (TARGET_AVX512DQ)
+ return (TARGET_AVX512VL
+ ? "vxorpd\t%x0, %x0, %x0"
+ : "vxorpd\t%g0, %g0, %g0");
+ else
+ return (TARGET_AVX512VL
+ ? "vpxorq\t%x0, %x0, %x0"
+ : "vpxorq\t%g0, %g0, %g0");
+ case MODE_V4SF:
+ if (!EXT_REX_SSE_REG_P (operands[0]))
+ return "%vxorps\t%0, %d0";
+ /* FALLTHRU */
case MODE_V16SF:
- return (TARGET_AVX512DQ
- ? "vxorps\t%g0, %g0, %g0"
- : "vpxord\t%g0, %g0, %g0");
case MODE_V8SF:
- return "vxorps\t%x0, %x0, %x0";
- case MODE_V4SF:
- return "%vxorps\t%0, %d0";
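+ /* Likewise for the single precision cases: EVEX vxorps needs
+    AVX512DQ, otherwise use the integer vpxord.  */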
+ if (!EXT_REX_SSE_REG_P (operands[0]))
+ return "vxorps\t%x0, %x0, %x0";
+ else if (TARGET_AVX512DQ)
+ return (TARGET_AVX512VL
+ ? "vxorps\t%x0, %x0, %x0"
+ : "vxorps\t%g0, %g0, %g0");
+ else
+ return (TARGET_AVX512VL
+ ? "vpxord\t%x0, %x0, %x0"
+ : "vpxord\t%g0, %g0, %g0");
default:
gcc_unreachable ();
case MODE_V2DF:
case MODE_V4SF:
gcc_assert (TARGET_SSE2);
- return (TARGET_AVX512F
- ? "vpternlogd\t{$0xFF, %0, %0, %0|%0, %0, %0, 0xFF}"
- : TARGET_AVX
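+ /* (v)pcmpeqd of a register with itself yields all ones.  EVEX
+    vpcmpeqd writes to a mask register instead, so for %xmm16-%xmm31
+    materialize the all-ones constant with vpternlogd and a 0xFF
+    immediate, using the full %zmm register unless AVX512VL allows a
+    128-bit form.  */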
+ if (!EXT_REX_SSE_REG_P (operands[0]))
+ return (TARGET_AVX
? "vpcmpeqd\t%0, %0, %0"
: "pcmpeqd\t%0, %0");
+ else if (TARGET_AVX512VL)
+ return "vpternlogd\t{$0xFF, %0, %0, %0|%0, %0, %0, 0xFF}";
+ else
+ return "vpternlogd\t{$0xFF, %g0, %g0, %g0|%g0, %g0, %g0, 0xFF}";
default:
gcc_unreachable ();
switch (get_attr_type (insn))
{
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
if (misaligned_operand (operands[0], XImode)
switch (get_attr_type (insn))
{
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
if (misaligned_operand (operands[0], OImode)
return "#";
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
/* TDmode values are passed as TImode on the stack. Moving them
return "movq\t{%1, %0|%0, %1}";
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
switch (get_attr_mode (insn))
switch (get_attr_type (insn))
{
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_MSKMOV:
return "kmovd\t{%1, %0|%0, %1}";
switch (get_attr_type (insn))
{
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
/* Handle misaligned load/store since we
return "mov{q}\t{%1, %0|%0, %1}";
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
switch (get_attr_mode (insn))
return "mov{l}\t{%1, %0|%0, %1}";
case TYPE_SSELOG1:
- return standard_sse_constant_opcode (insn, operands[1]);
+ return standard_sse_constant_opcode (insn, operands);
case TYPE_SSEMOV:
switch (get_attr_mode (insn))