+2011-03-27  H.J. Lu  <hongjiu.lu@intel.com>
+
+ * config/i386/sse.md (*avx_mov<mode>_internal): Don't assert on
+ unaligned 256-bit load/store.
+ (*avx_movu<ssemodesuffix><avxmodesuffix>): Likewise.
+ (*avx_movdqu<avxmodesuffix>): Likewise.
+
2011-03-27  Vladimir Makarov  <vmakarov@redhat.com>

 PR bootstrap/48307
return standard_sse_constant_opcode (insn, operands[1]);
case 1:
case 2:
- if (GET_MODE_ALIGNMENT (<MODE>mode) == 256
- && ((TARGET_AVX256_SPLIT_UNALIGNED_STORE
- && misaligned_operand (operands[0], <MODE>mode))
- || (TARGET_AVX256_SPLIT_UNALIGNED_LOAD
- && misaligned_operand (operands[1], <MODE>mode))))
- gcc_unreachable ();
switch (get_attr_mode (insn))
{
case MODE_V8SF:
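
For illustration only, not part of the patch: a minimal C sketch of the kind of misaligned 256-bit access that can reach the generic *avx_mov<mode>_internal pattern above; the exact options and tuning flags needed to hit the removed assertion are an assumption.

/* Sketch: with something like -O2 -mavx, the 32-byte memcpys below are
   typically expanded to single 256-bit vector moves whose MEM operands
   need not be 32-byte aligned (assumption: the exact expansion depends
   on target tuning).  */
typedef float v8sf __attribute__ ((vector_size (32)));

void
copy32 (char *dst, const char *src)
{
  v8sf tmp;
  __builtin_memcpy (&tmp, src, sizeof tmp);   /* unaligned 256-bit load  */
  __builtin_memcpy (dst, &tmp, sizeof tmp);   /* unaligned 256-bit store */
}
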
UNSPEC_MOVU))]
"AVX_VEC_FLOAT_MODE_P (<MODE>mode)
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
-{
- if (GET_MODE_ALIGNMENT (<MODE>mode) == 256
- && ((TARGET_AVX256_SPLIT_UNALIGNED_STORE
- && misaligned_operand (operands[0], <MODE>mode))
- || (TARGET_AVX256_SPLIT_UNALIGNED_LOAD
- && misaligned_operand (operands[1], <MODE>mode))))
- gcc_unreachable ();
- return "vmovu<ssemodesuffix>\t{%1, %0|%0, %1}";
-}
+ "vmovu<ssemodesuffix>\t{%1, %0|%0, %1}"
[(set_attr "type" "ssemov")
(set_attr "movu" "1")
(set_attr "prefix" "vex")
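
Again for illustration, not part of the patch: the explicit unaligned-move intrinsics below expand to vmovups, i.e. the template *avx_movu<ssemodesuffix><avxmodesuffix> now returns unconditionally for V8SF; that no splitting happens first under the default tuning is an assumption.

#include <immintrin.h>

/* Unaligned 256-bit float load/store; compiled with -mavx these map to
   vmovups, handled by *avx_movu<ssemodesuffix><avxmodesuffix>.  */
__m256
load_ps_unaligned (const float *p)
{
  return _mm256_loadu_ps (p);
}

void
store_ps_unaligned (float *p, __m256 v)
{
  _mm256_storeu_ps (p, v);
}
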
[(match_operand:AVXMODEQI 1 "nonimmediate_operand" "xm,x")]
UNSPEC_MOVU))]
"TARGET_AVX && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
-{
- if (GET_MODE_ALIGNMENT (<MODE>mode) == 256
- && ((TARGET_AVX256_SPLIT_UNALIGNED_STORE
- && misaligned_operand (operands[0], <MODE>mode))
- || (TARGET_AVX256_SPLIT_UNALIGNED_LOAD
- && misaligned_operand (operands[1], <MODE>mode))))
- gcc_unreachable ();
- return "vmovdqu\t{%1, %0|%0, %1}";
-}
+ "vmovdqu\t{%1, %0|%0, %1}"
[(set_attr "type" "ssemov")
(set_attr "movu" "1")
(set_attr "prefix" "vex")
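
A matching integer-vector sketch, also not part of the patch: the unaligned integer intrinsics correspond to the vmovdqu template returned by *avx_movdqu<avxmodesuffix>; treating -mavx as the only required option is an assumption.

#include <immintrin.h>

/* Unaligned 256-bit integer load/store; these expand to vmovdqu,
   matching *avx_movdqu<avxmodesuffix>.  */
__m256i
load_si256_unaligned (const __m256i *p)
{
  return _mm256_loadu_si256 (p);
}

void
store_si256_unaligned (__m256i *p, __m256i v)
{
  _mm256_storeu_si256 (p, v);
}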