(define_insn "*movqi_internal"
[(set (match_operand:QI 0 "nonimmediate_operand"
- "=q,q ,q ,r,r ,?r,m ,k,k,r,m,k")
+ "=Q,R,r,q,q,r,r ,?r,m ,k,k,r,m,k")
(match_operand:QI 1 "general_operand"
- "q ,qn,qm,q,rn,qm,qn,r,k,k,k,m"))]
+ "Q ,R,r,n,m,q,rn, m,qn,r,k,k,k,m"))]
"!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
static char buf[128];
case TYPE_MSKMOV:
switch (which_alternative)
{
- case 7:
+ case 9:
ops = "kmov%s\t{%%k1, %%0|%%0, %%k1}";
break;
- case 9:
+ case 11:
ops = "kmov%s\t{%%1, %%k0|%%k0, %%1}";
break;
- case 10:
- case 11:
+ case 12:
+ case 13:
gcc_assert (TARGET_AVX512DQ);
/* FALLTHRU */
- case 8:
+ case 10:
ops = "kmov%s\t{%%1, %%0|%%0, %%1}";
break;
default:
gcc_unreachable ();
}
}
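+ ;; E.g. without AVX512DQ the GPR<->mask alternatives use HImode (see the
+ ;; "mode" attribute below), so alternative 9 emits "kmovw %eax, %k1";
+ ;; with AVX512DQ the byte form "kmovb" is used instead.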
[(set (attr "isa")
- (if_then_else (eq_attr "alternative" "10,11")
- (const_string "avx512dq")
- (const_string "*")))
+ (cond [(eq_attr "alternative" "1,2")
+ (const_string "x64")
+ (eq_attr "alternative" "12,13")
+ (const_string "avx512dq")
+ ]
+ (const_string "*")))
(set (attr "type")
- (cond [(eq_attr "alternative" "7,8,9,10,11")
+ (cond [(eq_attr "alternative" "9,10,11,12,13")
(const_string "mskmov")
- (and (eq_attr "alternative" "5")
+ (and (eq_attr "alternative" "7")
(not (match_operand:QI 1 "aligned_operand")))
(const_string "imovx")
(match_test "optimize_function_for_size_p (cfun)")
(const_string "imov")
- (and (eq_attr "alternative" "3")
+ (and (eq_attr "alternative" "5")
(ior (not (match_test "TARGET_PARTIAL_REG_STALL"))
(not (match_test "TARGET_QIMODE_MATH"))))
(const_string "imov")
- (eq_attr "alternative" "3,5")
+ (eq_attr "alternative" "5,7")
(const_string "imovx")
(and (match_test "TARGET_MOVX")
- (eq_attr "alternative" "2"))
+ (eq_attr "alternative" "4"))
(const_string "imovx")
]
(const_string "imov")))
(set (attr "prefix")
- (if_then_else (eq_attr "alternative" "7,8,9")
+ (if_then_else (eq_attr "alternative" "9,10,11")
(const_string "vex")
(const_string "orig")))
(set (attr "mode")
- (cond [(eq_attr "alternative" "3,4,5")
+ (cond [(eq_attr "alternative" "5,6,7")
(const_string "SI")
- (eq_attr "alternative" "6")
+ (eq_attr "alternative" "8")
(const_string "QI")
- (and (eq_attr "alternative" "7,8,9")
+ (and (eq_attr "alternative" "9,10,11")
(not (match_test "TARGET_AVX512DQ")))
(const_string "HI")
(eq_attr "type" "imovx")
(const_string "SI")
+ ;; For -Os, 8-bit immediates are always shorter than 32-bit
+ ;; ones.
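+ ;; E.g. "movb $1, %cl" encodes in 2 bytes (0xb1 0x01), while
+ ;; "movl $1, %ecx" takes 5 bytes (0xb9 plus a 32-bit immediate).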
+ (and (eq_attr "type" "imov")
+ (and (eq_attr "alternative" "3")
+ (match_test "optimize_function_for_size_p (cfun)")))
+ (const_string "QI")
+ ;; For -Os, movl where one or both operands are NON_Q_REGS
+ ;; and both are LEGACY_REGS is shorter than movb.
+ ;; Otherwise movb and movl sizes are the same, so decide purely
+ ;; based on speed factors.
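+ ;; E.g. in 64-bit code "movb %sil, %dil" needs a REX prefix (0x40 0x88
+ ;; 0xf7, 3 bytes) because %sil/%dil are byte-addressable only with REX,
+ ;; while "movl %esi, %edi" is just 0x89 0xf7 (2 bytes).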
+ (and (eq_attr "type" "imov")
+ (and (eq_attr "alternative" "1")
+ (match_test "optimize_function_for_size_p (cfun)")))
+ (const_string "SI")
(and (eq_attr "type" "imov")
- (and (eq_attr "alternative" "0,1")
+ (and (eq_attr "alternative" "0,1,2,3")
(and (match_test "TARGET_PARTIAL_REG_DEPENDENCY")
- (and (not (match_test "optimize_function_for_size_p (cfun)"))
- (not (match_test "TARGET_PARTIAL_REG_STALL"))))))
+ (not (match_test "TARGET_PARTIAL_REG_STALL")))))
(const_string "SI")
;; Avoid partial register stalls when not using QImode arithmetic
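+ ;; E.g. on P6-class cores, writing %al and then reading %eax stalls for
+ ;; several cycles; a full-width movl write avoids that.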
(and (eq_attr "type" "imov")
- (and (eq_attr "alternative" "0,1")
+ (and (eq_attr "alternative" "0,1,2,3")
(and (match_test "TARGET_PARTIAL_REG_STALL")
(not (match_test "TARGET_QIMODE_MATH")))))
(const_string "SI")