['VMINPS', 'x86_avx_min_ps_256', ['a', 'b']],
['VMAXPS', 'x86_avx_max_ps_256', ['a', 'b']],
['VROUND', 'x86_avx_round_ps_256', ['a', 'rounding']],
- ['VCMPPS', 'x86_avx_cmp_ps_256', ['a', 'b', 'cmpop']],
- ['VBLENDVPS', 'x86_avx_blendv_ps_256', ['a', 'b', 'mask']],
['BEXTR_32', 'x86_bmi_bextr_32', ['src', 'control']],
- ['VMASKLOADD', 'x86_avx2_maskload_d_256', ['src', 'mask']],
- ['VMASKMOVPS', 'x86_avx_maskload_ps_256', ['src', 'mask']],
- ['VMASKSTOREPS', 'x86_avx_maskstore_ps_256', ['src', 'mask', 'val']],
['VPSHUFB', 'x86_avx2_pshuf_b', ['a', 'b']],
['VPERMD', 'x86_avx2_permd', ['a', 'idx']],
['VPERMPS', 'x86_avx2_permps', ['idx', 'a']],
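// Reference sketch (not part of this patch): the VMASKLOADD / VMASKMOVPS /
// VMASKSTOREPS entries removed above have target-independent counterparts in
// the llvm.masked.load / llvm.masked.store intrinsics, reachable through
// IRBuilder.  The wrapper names, the Align(4) value, and the LLVM >= 13
// CreateMaskedLoad / CreateMaskedStore signatures used here are assumptions.
#include "llvm/IR/IRBuilder.h"
#include "llvm/Support/Alignment.h"
using namespace llvm;

// Load a vector of type vecTy from pSrc for lanes whose i1 mask bit is set;
// unset lanes take their value from passthru.  Note the <N x i1> mask,
// unlike the AVX intrinsics, which keyed off the sign bit of 32-bit lanes.
static Value *MaskedLoadSketch(IRBuilder<> &B, Type *vecTy, Value *pSrc,
                               Value *mask, Value *passthru)
{
    return B.CreateMaskedLoad(vecTy, pSrc, Align(4), mask, passthru);
}

// Store only the lanes of val whose i1 mask bit is set.
static void MaskedStoreSketch(IRBuilder<> &B, Value *val, Value *pDst,
                              Value *mask)
{
    B.CreateMaskedStore(val, pDst, Align(4), mask);
}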
CallInst *CALL2(Value *Callee, Value* arg1, Value* arg2);
CallInst *CALL3(Value *Callee, Value* arg1, Value* arg2, Value* arg3);
-Value *VCMPPS_EQ(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_EQ_OQ)); }
-Value *VCMPPS_LT(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_LT_OQ)); }
-Value *VCMPPS_LE(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_LE_OQ)); }
-Value *VCMPPS_ISNAN(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_UNORD_Q)); }
-Value *VCMPPS_NEQ(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_NEQ_OQ)); }
-Value *VCMPPS_GE(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_GE_OQ)); }
-Value *VCMPPS_GT(Value* a, Value* b) { return VCMPPS(a, b, C((uint8_t)_CMP_GT_OQ)); }
-Value *VCMPPS_NOTNAN(Value* a, Value* b){ return VCMPPS(a, b, C((uint8_t)_CMP_ORD_Q)); }
-
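// Reference sketch (not part of this patch): the VCMPPS_* helpers removed
// above map directly onto LLVM's native fcmp predicates, so no AVX compare
// intrinsic is needed.  The helper names and the explicit sign-extension to
// a 32-bit lane-mask type are illustrative assumptions; includes are the
// same as in the masked load/store sketch above.
//
// _CMP_EQ_OQ -> ordered equal, _CMP_UNORD_Q -> either operand is NaN.
// fcmp yields <N x i1>; the sext to laneMaskTy rebuilds the
// all-ones / all-zeros lane mask that x86_avx_cmp_ps_256 produced.
static Value *FCmpEQMask(IRBuilder<> &B, Value *a, Value *b, Type *laneMaskTy)
{
    return B.CreateSExt(B.CreateFCmpOEQ(a, b), laneMaskTy);
}

static Value *FCmpIsNaNMask(IRBuilder<> &B, Value *a, Value *b, Type *laneMaskTy)
{
    return B.CreateSExt(B.CreateFCmpUNO(a, b), laneMaskTy);
}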
Value *MASK(Value *vmask);
Value *MASK_16(Value *vmask);