[(V16QI "TARGET_SSE4_1") V8HI])
;; Extract one QImode/HImode element of a 128-bit vector into a GPR
;; (via %k0 -- alternatives 0/2) or into memory (alternatives 1/3).
;; This hunk widens the pattern from two to four alternatives: the new
;; alternatives 2/3 take a "v" input (%xmm16-%xmm31) and so require the
;; EVEX-encoded vpextrb/vpextrw, gated on AVX512BW with prefix "evex";
;; the pre-existing alternatives keep "x" (%xmm0-%xmm15) and maybe_vex.
;; prefix_extra stays "*" for the GPR-destination V8HImode case on
;; alternatives 0 and 2 -- presumably because reg-dest pextrw predates
;; SSE4.1 and needs no extra opcode prefix; confirm against the full md.
(define_insn "*vec_extract<mode>"
- [(set (match_operand:<ssescalarmode> 0 "register_sse4nonimm_operand" "=r,m")
+ [(set (match_operand:<ssescalarmode> 0 "register_sse4nonimm_operand" "=r,m,r,m")
(vec_select:<ssescalarmode>
- (match_operand:PEXTR_MODE12 1 "register_operand" "x,x")
+ (match_operand:PEXTR_MODE12 1 "register_operand" "x,x,v,v")
(parallel
[(match_operand:SI 2 "const_0_to_<ssescalarnummask>_operand")])))]
"TARGET_SSE2"
"@
%vpextr<ssemodesuffix>\t{%2, %1, %k0|%k0, %1, %2}
- %vpextr<ssemodesuffix>\t{%2, %1, %0|%0, %1, %2}"
- [(set_attr "isa" "*,sse4")
+ %vpextr<ssemodesuffix>\t{%2, %1, %0|%0, %1, %2}
+ vpextr<ssemodesuffix>\t{%2, %1, %k0|%k0, %1, %2}
+ vpextr<ssemodesuffix>\t{%2, %1, %0|%0, %1, %2}"
+ [(set_attr "isa" "*,sse4,avx512bw,avx512bw")
(set_attr "type" "sselog1")
(set_attr "prefix_data16" "1")
(set (attr "prefix_extra")
(if_then_else
- (and (eq_attr "alternative" "0")
+ (and (eq_attr "alternative" "0,2")
(eq (const_string "<MODE>mode") (const_string "V8HImode")))
(const_string "*")
(const_string "1")))
(set_attr "length_immediate" "1")
- (set_attr "prefix" "maybe_vex")
+ (set_attr "prefix" "maybe_vex,maybe_vex,evex,evex")
(set_attr "mode" "TI")])
;; Same QI/HI element extraction, but zero-extended into a full
;; SImode/DImode GPR.  The hunk turns the single template into a
;; two-alternative "@" template so a "v" (%xmm16+) input can use the
;; EVEX vpextrb/vpextrw under AVX512BW (isa "*,avx512bw").
;; NOTE(review): this pattern appears truncated in the chunk -- the
;; (if_then_else ...) for prefix_extra is immediately followed by a
;; stray "#)" with its condition/arms and the insn's closing attrs
;; missing; verify against the full sse.md before relying on it.
(define_insn "*vec_extract<PEXTR_MODE12:mode>_zext"
- [(set (match_operand:SWI48 0 "register_operand" "=r")
+ [(set (match_operand:SWI48 0 "register_operand" "=r,r")
(zero_extend:SWI48
(vec_select:<PEXTR_MODE12:ssescalarmode>
- (match_operand:PEXTR_MODE12 1 "register_operand" "x")
+ (match_operand:PEXTR_MODE12 1 "register_operand" "x,v")
(parallel
[(match_operand:SI 2
"const_0_to_<PEXTR_MODE12:ssescalarnummask>_operand")]))))]
"TARGET_SSE2"
- "%vpextr<PEXTR_MODE12:ssemodesuffix>\t{%2, %1, %k0|%k0, %1, %2}"
- [(set_attr "type" "sselog1")
+ "@
+ %vpextr<PEXTR_MODE12:ssemodesuffix>\t{%2, %1, %k0|%k0, %1, %2}
+ vpextr<PEXTR_MODE12:ssemodesuffix>\t{%2, %1, %k0|%k0, %1, %2}"
+ [(set_attr "isa" "*,avx512bw")
+ (set_attr "type" "sselog1")
(set_attr "prefix_data16" "1")
(set (attr "prefix_extra")
(if_then_else
"#")
;; Extract element 0 of an SImode/DImode-element vector; the "#"
;; template means this is always split after reload.  The hunk widens
;; the SSE-register alternatives from "x"/"xm" to "v"/"vm" so
;; EVEX-only registers (%xmm16+) are also allowed.
;; NOTE(review): the split condition and replacement RTL of what is
;; presumably a define_insn_and_split are not visible in this chunk.
(define_insn "*vec_extract<ssevecmodelower>_0"
- [(set (match_operand:SWI48 0 "nonimmediate_operand" "=r ,r,x ,m")
+ [(set (match_operand:SWI48 0 "nonimmediate_operand" "=r ,r,v ,m")
(vec_select:SWI48
- (match_operand:<ssevecmode> 1 "nonimmediate_operand" "mYj,x,xm,x")
+ (match_operand:<ssevecmode> 1 "nonimmediate_operand" "mYj,v,vm,v")
(parallel [(const_int 0)])))]
"TARGET_SSE && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"#"
;; NOTE(review): the define_insn_and_split header line for this
;; pattern is missing from the chunk; judging by the body it is the
;; V4SI element-0 zero-extending extract (presumably named
;; "*vec_extractv4si_0_zext") -- confirm against the full file.
;; The hunk relaxes the input constraint from "x" to "v" so %xmm16+
;; qualifies; after reload the insn is rewritten via gen_lowpart so
;; the zero-extend operates on the SImode lowpart directly.
[(set (match_operand:DI 0 "register_operand" "=r")
(zero_extend:DI
(vec_select:SI
- (match_operand:V4SI 1 "register_operand" "x")
+ (match_operand:V4SI 1 "register_operand" "v")
(parallel [(const_int 0)]))))]
"TARGET_64BIT && TARGET_SSE2 && TARGET_INTER_UNIT_MOVES_FROM_VEC"
"#"
"operands[1] = gen_lowpart (SImode, operands[1]);")
;; 32-bit-only (!TARGET_64BIT) extract of the low DImode element of a
;; V2DI; lowered after reload to a plain move of the lowpart.  The hunk
;; changes "x"/"xm" to "v"/"vm" so EVEX-only registers qualify.
;; NOTE(review): the "#" template and the split-condition/replacement
;; lines appear to be missing between the insn condition and the
;; preparation statement below -- extraction truncation, verify upstream.
(define_insn "*vec_extractv2di_0_sse"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=x,m")
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=v,m")
(vec_select:DI
- (match_operand:V2DI 1 "nonimmediate_operand" "xm,x")
+ (match_operand:V2DI 1 "nonimmediate_operand" "vm,v")
(parallel [(const_int 0)])))]
"TARGET_SSE && !TARGET_64BIT
&& !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"operands[1] = gen_lowpart (<MODE>mode, operands[1]);")
;; Extract a selected SImode element of a V4SI.  The hunk grows the
;; pattern from 4 to 6 alternatives: new alternative 1 (GPR/mem dest,
;; "v" input) uses EVEX vpextrd gated on AVX512DQ, and new alternative
;; 5 ("Yv" dest, "v" input) uses the EVEX vpsrldq byte shift gated on
;; AVX512BW.  The C output routine renumbers its cases to match the
;; new alternative order and fixes the "operands [2]" spacing to the
;; conventional "operands[2]"; the shift alternatives scale the element
;; index by 4 to get a byte count for psrldq/vpsrldq.
(define_insn "*vec_extractv4si"
- [(set (match_operand:SI 0 "nonimmediate_operand" "=rm,Yr,*x,x")
+ [(set (match_operand:SI 0 "nonimmediate_operand" "=rm,rm,Yr,*x,x,Yv")
(vec_select:SI
- (match_operand:V4SI 1 "register_operand" "x,0,0,x")
+ (match_operand:V4SI 1 "register_operand" "x,v,0,0,x,v")
(parallel [(match_operand:SI 2 "const_0_to_3_operand")])))]
"TARGET_SSE4_1"
{
switch (which_alternative)
{
case 0:
+ case 1:
return "%vpextrd\t{%2, %1, %0|%0, %1, %2}";
- case 1:
case 2:
- operands [2] = GEN_INT (INTVAL (operands[2]) * 4);
+ case 3:
+ operands[2] = GEN_INT (INTVAL (operands[2]) * 4);
return "psrldq\t{%2, %0|%0, %2}";
- case 3:
- operands [2] = GEN_INT (INTVAL (operands[2]) * 4);
+ case 4:
+ case 5:
+ operands[2] = GEN_INT (INTVAL (operands[2]) * 4);
return "vpsrldq\t{%2, %1, %0|%0, %1, %2}";
default:
gcc_unreachable ();
}
}
- [(set_attr "isa" "*,noavx,noavx,avx")
- (set_attr "type" "sselog1,sseishft1,sseishft1,sseishft1")
- (set_attr "prefix_extra" "1,*,*,*")
+ [(set_attr "isa" "*,avx512dq,noavx,noavx,avx,avx512bw")
+ (set_attr "type" "sselog1,sselog1,sseishft1,sseishft1,sseishft1,sseishft1")
+ (set_attr "prefix_extra" "1,1,*,*,*,*")
(set_attr "length_immediate" "1")
- (set_attr "prefix" "maybe_vex,orig,orig,vex")
+ (set_attr "prefix" "maybe_vex,evex,orig,orig,vex,evex")
(set_attr "mode" "TI")])
;; vpextrd of a selected V4SI element, zero-extended into a 64-bit GPR
;; (via the %k0 32-bit view).  The hunk adds a second alternative that
;; accepts a "v" input, gated on AVX512DQ (EVEX vpextrd); the single
;; shared template still works for both alternatives.
;; NOTE(review): the bare "})" closing line looks truncated -- the
;; customary (set_attr "mode" "TI")]) tail is missing from this chunk.
(define_insn "*vec_extractv4si_zext"
- [(set (match_operand:DI 0 "register_operand" "=r")
+ [(set (match_operand:DI 0 "register_operand" "=r,r")
(zero_extend:DI
(vec_select:SI
- (match_operand:V4SI 1 "register_operand" "x")
+ (match_operand:V4SI 1 "register_operand" "x,v")
(parallel [(match_operand:SI 2 "const_0_to_3_operand")]))))]
"TARGET_64BIT && TARGET_SSE4_1"
"%vpextrd\t{%2, %1, %k0|%k0, %1, %2}"
- [(set_attr "type" "sselog1")
+ [(set_attr "isa" "*,avx512dq")
+ (set_attr "type" "sselog1")
(set_attr "prefix_extra" "1")
(set_attr "length_immediate" "1")
(set_attr "prefix" "maybe_vex")
})
;; Extract element 1 (the high DImode half) of a V2DI.  The hunk grows
;; the pattern from 7 to 9 alternatives, inserting two EVEX-capable
;; ones: new alternative 1 is the x64 AVX512DQ vpextrq from a "v"
;; register into a GPR/mem, and new alternative 5 is the AVX512BW
;; vpsrldq-by-8 with a "Yv" destination and "v" source.  All attribute
;; vectors (isa/type/length_immediate/prefix_rex/prefix_extra/prefix/
;; mode) are widened in lockstep to 9 entries.
(define_insn "*vec_extractv2di_1"
- [(set (match_operand:DI 0 "nonimmediate_operand" "=rm,m,x,x,x,x,r")
+ [(set (match_operand:DI 0 "nonimmediate_operand" "=rm,rm,m,x,x,Yv,x,v,r")
(vec_select:DI
- (match_operand:V2DI 1 "nonimmediate_operand" "x ,x,0,x,x,o,o")
+ (match_operand:V2DI 1 "nonimmediate_operand" "x ,v ,v,0,x, v,x,o,o")
(parallel [(const_int 1)])))]
"TARGET_SSE && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
%vpextrq\t{$1, %1, %0|%0, %1, 1}
+ vpextrq\t{$1, %1, %0|%0, %1, 1}
%vmovhps\t{%1, %0|%0, %1}
psrldq\t{$8, %0|%0, 8}
vpsrldq\t{$8, %1, %0|%0, %1, 8}
+ vpsrldq\t{$8, %1, %0|%0, %1, 8}
movhlps\t{%1, %0|%0, %1}
#
#"
- [(set_attr "isa" "x64_sse4,*,sse2_noavx,avx,noavx,*,x64")
- (set_attr "type" "sselog1,ssemov,sseishft1,sseishft1,ssemov,ssemov,imov")
- (set_attr "length_immediate" "1,*,1,1,*,*,*")
- (set_attr "prefix_rex" "1,*,*,*,*,*,*")
- (set_attr "prefix_extra" "1,*,*,*,*,*,*")
- (set_attr "prefix" "maybe_vex,maybe_vex,orig,vex,orig,*,*")
- (set_attr "mode" "TI,V2SF,TI,TI,V4SF,DI,DI")])
+ [(set_attr "isa" "x64_sse4,x64_avx512dq,*,sse2_noavx,avx,avx512bw,noavx,*,x64")
+ (set_attr "type" "sselog1,sselog1,ssemov,sseishft1,sseishft1,sseishft1,ssemov,ssemov,imov")
+ (set_attr "length_immediate" "1,1,*,1,1,1,*,*,*")
+ (set_attr "prefix_rex" "1,1,*,*,*,*,*,*,*")
+ (set_attr "prefix_extra" "1,1,*,*,*,*,*,*,*")
+ (set_attr "prefix" "maybe_vex,evex,maybe_vex,orig,vex,evex,orig,*,*")
+ (set_attr "mode" "TI,TI,V2SF,TI,TI,TI,V4SF,DI,DI")])
(define_split
[(set (match_operand:<ssescalarmode> 0 "register_operand")
--- /dev/null
+/* { dg-do compile { target { ! ia32 } } } */
+/* { dg-options "-O2 -mavx512vl -mavx512bw" } */
+
+typedef char v16qi __attribute__((vector_size (16)));
+typedef short v8hi __attribute__((vector_size (16)));
+typedef int v4si __attribute__((vector_size (16)));
+typedef long long v2di __attribute__((vector_size (16)));
+
+/* Extract byte 3 of a v16qi forced into %xmm16 into a byte-class GPR
+   ("q" constraint); with -mavx512bw this must be an EVEX vpextrb on
+   xmm16, which the scan-assembler directives below count.  */
+void
+f1 (v16qi a)
+{
+  register v16qi c __asm ("xmm16") = a;
+  register unsigned char e __asm ("dl");
+  asm volatile ("" : "+v" (c));
+  v16qi d = c;
+  e = ((unsigned char *) &d)[3];
+  asm volatile ("" : : "q" (e));
+}
+
+/* Extract halfword 3 of a v8hi in %xmm16 into %dx; exercises the EVEX
+   vpextrw on xmm16.  NOTE(review): no return statement despite the
+   unsigned short return type -- tolerable since this is a compile-only
+   test (dg-do compile), but worth confirming intent.  */
+unsigned short
+f2 (v8hi a)
+{
+  register v8hi c __asm ("xmm16") = a;
+  register unsigned short e __asm ("dx");
+  asm volatile ("" : "+v" (c));
+  v8hi d = c;
+  e = ((unsigned short *) &d)[3];
+  asm volatile ("" : : "r" (e));
+}
+
+/* Byte extract from %xmm16 zero-extended to unsigned int -- exercises
+   the zero-extending vpextrb path (32-bit GPR destination).  */
+unsigned int
+f3 (v16qi a)
+{
+  register v16qi c __asm ("xmm16") = a;
+  asm volatile ("" : "+v" (c));
+  v16qi d = c;
+  return ((unsigned char *) &d)[3];
+}
+
+/* Halfword extract from %xmm16 zero-extended to unsigned int --
+   exercises the zero-extending vpextrw path.  */
+unsigned int
+f4 (v8hi a)
+{
+  register v8hi c __asm ("xmm16") = a;
+  asm volatile ("" : "+v" (c));
+  v8hi d = c;
+  return ((unsigned short *) &d)[3];
+}
+
+/* Byte extract from %xmm16 zero-extended all the way to 64 bits --
+   exercises the DImode zero-extend variant of vpextrb.  */
+unsigned long long
+f5 (v16qi a)
+{
+  register v16qi c __asm ("xmm16") = a;
+  asm volatile ("" : "+v" (c));
+  v16qi d = c;
+  return ((unsigned char *) &d)[3];
+}
+
+/* Halfword extract from %xmm16 zero-extended to 64 bits -- exercises
+   the DImode zero-extend variant of vpextrw.  */
+unsigned long long
+f6 (v8hi a)
+{
+  register v8hi c __asm ("xmm16") = a;
+  asm volatile ("" : "+v" (c));
+  v8hi d = c;
+  return ((unsigned short *) &d)[3];
+}
+
+/* Byte extract from %xmm16 stored straight to memory -- exercises the
+   memory-destination alternative of vpextrb.  */
+void
+f7 (v16qi a, unsigned char *p)
+{
+  register v16qi c __asm ("xmm16") = a;
+  asm volatile ("" : "+v" (c));
+  v16qi d = c;
+  *p = ((unsigned char *) &d)[3];
+}
+
+/* Halfword extract from %xmm16 stored straight to memory -- exercises
+   the memory-destination alternative of vpextrw.  */
+void
+f8 (v8hi a, unsigned short *p)
+{
+  register v8hi c __asm ("xmm16") = a;
+  asm volatile ("" : "+v" (c));
+  v8hi d = c;
+  *p = ((unsigned short *) &d)[3];
+}
+
+/* SImode element 3 of a v4si in %xmm16 with the result pinned in
+   %xmm17: both operands are EVEX-only, so the expected code is a
+   vpsrldq shift rather than an extract to a GPR (counted below).  */
+void
+f9 (v4si a)
+{
+  register v4si c __asm ("xmm16") = a;
+  register unsigned int e __asm ("xmm17");
+  asm volatile ("" : "+v" (c));
+  v4si d = c;
+  e = ((unsigned int *) &d)[3];
+  asm volatile ("" : "+v" (e));
+}
+
+/* High DImode element of a v2di in %xmm16 with the result pinned in
+   %xmm17: like f9, expected to compile to a vpsrldq between EVEX-only
+   registers instead of an extract through a GPR.  */
+void
+f10 (v2di a)
+{
+  register v2di c __asm ("xmm16") = a;
+  register unsigned long long e __asm ("xmm17");
+  asm volatile ("" : "+v" (c));
+  v2di d = c;
+  e = ((unsigned long long *) &d)[1];
+  asm volatile ("" : "+v" (e));
+}
+
+/* f1/f3/f5/f7 must each emit one vpextrb and f2/f4/f6/f8 one vpextrw
+   operating on %xmm16 (4 of each); f9/f10 keep the result in an XMM
+   register, so they are expected to use vpsrldq between xmm16/xmm17
+   (2 occurrences) rather than an extract instruction.  */
+/* { dg-final { scan-assembler-times "vpextrb\[^\n\r]*xmm16" 4 } } */
+/* { dg-final { scan-assembler-times "vpextrw\[^\n\r]*xmm16" 4 } } */
+/* { dg-final { scan-assembler-times "vpsrldq\[^\n\r]*xmm1\[67\]\[^\n\r]*xmm1\[67\]" 2 } } */