From: Uros Bizjak <ubizjak@gmail.com>
Date: Tue, 16 Aug 2016 18:23:47 +0000 (+0200)
Subject: i386.md (*ashl<mode>3_mask): Rewrite define_insn pattern as define_insn_and_split.
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=3c5db734e071af3ea89cb9dfc0a03d89744ed455;p=gcc.git

i386.md (*ashl<mode>3_mask): Rewrite define_insn pattern as define_insn_and_split.

	* config/i386/i386.md (*ashl<mode>3_mask): Rewrite define_insn
	pattern as define_insn_and_split.  Split insn before reload to
	ashl<mode>3_1.
	(*<shift_insn><mode>3_mask): Ditto.  Split insn before reload to
	<shift_insn><mode>3_1.
	(*<rotate_insn><mode>3_mask): Ditto.  Split insn before reload to
	<rotate_insn><mode>3_1.

From-SVN: r239511
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 8bd7dfc794f..b128931ccb3 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,13 @@
+2016-08-16  Uros Bizjak  <ubizjak@gmail.com>
+
+	* config/i386/i386.md (*ashl<mode>3_mask): Rewrite define_insn
+	pattern as define_insn_and_split.  Split insn before reload to
+	ashl<mode>3_1.
+	(*<shift_insn><mode>3_mask): Ditto.  Split insn before reload to
+	<shift_insn><mode>3_1.
+	(*<rotate_insn><mode>3_mask): Ditto.  Split insn before reload to
+	<rotate_insn><mode>3_1.
+
 2016-08-16  David Malcolm  <dmalcolm@redhat.com>
 
 	PR c/72857
@@ -33,8 +43,8 @@
 2016-08-16  Jakub Jelinek  <jakub@redhat.com>
 
 	PR target/71910
-	* tree-cfg.c (execute_fixup_cfg): Add node variable, use it.  Before inlining,
-	add cgraph edge for the added __builtin_unreachable call.
+	* tree-cfg.c (execute_fixup_cfg): Add node variable, use it.  Before
+	inlining, add cgraph edge for the added __builtin_unreachable call.
 
 	PR middle-end/67485
 	* expmed.c (expand_mult_const): Change val_so_far's type to UHWI,
diff --git a/gcc/config/i386/i386.md b/gcc/config/i386/i386.md
index e5d2fd01146..eed779866b4 100644
--- a/gcc/config/i386/i386.md
+++ b/gcc/config/i386/i386.md
@@ -9794,23 +9794,27 @@
 })
 
 ;; Avoid useless masking of count operand.
-(define_insn "*ashl<mode>3_mask"
-  [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm")
+(define_insn_and_split "*ashl<mode>3_mask"
+  [(set (match_operand:SWI48 0 "nonimmediate_operand")
 	(ashift:SWI48
-	  (match_operand:SWI48 1 "nonimmediate_operand" "0")
+	  (match_operand:SWI48 1 "nonimmediate_operand")
 	  (subreg:QI
 	    (and:SI
-	      (match_operand:SI 2 "register_operand" "c")
-	      (match_operand:SI 3 "const_int_operand" "n")) 0)))
+	      (match_operand:SI 2 "register_operand")
+	      (match_operand:SI 3 "const_int_operand")) 0)))
    (clobber (reg:CC FLAGS_REG))]
   "ix86_binary_operator_ok (ASHIFT, <MODE>mode, operands)
    && (INTVAL (operands[3]) & (GET_MODE_BITSIZE (<MODE>mode)-1))
-      == GET_MODE_BITSIZE (<MODE>mode)-1"
-{
-  return "sal{<imodesuffix>}\t{%b2, %0|%0, %b2}";
-}
-  [(set_attr "type" "ishift")
-   (set_attr "mode" "<MODE>")])
+      == GET_MODE_BITSIZE (<MODE>mode)-1
+   && can_create_pseudo_p ()"
+  "#"
+  "&& 1"
+  [(parallel
+     [(set (match_dup 0)
+	   (ashift:SWI48 (match_dup 1)
+			 (match_dup 2)))
+      (clobber (reg:CC FLAGS_REG))])]
+  "operands[2] = gen_lowpart (QImode, operands[2]);")
 
 (define_insn "*bmi2_ashl<mode>3_1"
   [(set (match_operand:SWI48 0 "register_operand" "=r")
@@ -10290,23 +10294,27 @@
   "ix86_expand_binary_operator (<CODE>, <MODE>mode, operands); DONE;")
 
 ;; Avoid useless masking of count operand.
-(define_insn "*3_mask" - [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm") +(define_insn_and_split "*3_mask" + [(set (match_operand:SWI48 0 "nonimmediate_operand") (any_shiftrt:SWI48 - (match_operand:SWI48 1 "nonimmediate_operand" "0") + (match_operand:SWI48 1 "nonimmediate_operand") (subreg:QI (and:SI - (match_operand:SI 2 "register_operand" "c") - (match_operand:SI 3 "const_int_operand" "n")) 0))) + (match_operand:SI 2 "register_operand") + (match_operand:SI 3 "const_int_operand")) 0))) (clobber (reg:CC FLAGS_REG))] "ix86_binary_operator_ok (, mode, operands) && (INTVAL (operands[3]) & (GET_MODE_BITSIZE (mode)-1)) - == GET_MODE_BITSIZE (mode)-1" -{ - return "{}\t{%b2, %0|%0, %b2}"; -} - [(set_attr "type" "ishift") - (set_attr "mode" "")]) + == GET_MODE_BITSIZE (mode)-1 + && can_create_pseudo_p ()" + "#" + "&& 1" + [(parallel + [(set (match_dup 0) + (any_shiftrt:SWI48 (match_dup 1) + (match_dup 2))) + (clobber (reg:CC FLAGS_REG))])] + "operands[2] = gen_lowpart (QImode, operands[2]);") (define_insn_and_split "*3_doubleword" [(set (match_operand:DWI 0 "register_operand" "=r") @@ -10745,23 +10753,27 @@ "ix86_expand_binary_operator (, mode, operands); DONE;") ;; Avoid useless masking of count operand. -(define_insn "*3_mask" - [(set (match_operand:SWI48 0 "nonimmediate_operand" "=rm") +(define_insn_and_split "*3_mask" + [(set (match_operand:SWI48 0 "nonimmediate_operand") (any_rotate:SWI48 - (match_operand:SWI48 1 "nonimmediate_operand" "0") + (match_operand:SWI48 1 "nonimmediate_operand") (subreg:QI (and:SI - (match_operand:SI 2 "register_operand" "c") - (match_operand:SI 3 "const_int_operand" "n")) 0))) + (match_operand:SI 2 "register_operand") + (match_operand:SI 3 "const_int_operand")) 0))) (clobber (reg:CC FLAGS_REG))] "ix86_binary_operator_ok (, mode, operands) && (INTVAL (operands[3]) & (GET_MODE_BITSIZE (mode)-1)) - == GET_MODE_BITSIZE (mode)-1" -{ - return "{}\t{%b2, %0|%0, %b2}"; -} - [(set_attr "type" "rotate") - (set_attr "mode" "")]) + == GET_MODE_BITSIZE (mode)-1 + && can_create_pseudo_p ()" + "#" + "&& 1" + [(parallel + [(set (match_dup 0) + (any_rotate:SWI48 (match_dup 1) + (match_dup 2))) + (clobber (reg:CC FLAGS_REG))])] + "operands[2] = gen_lowpart (QImode, operands[2]);") ;; Implement rotation using two double-precision ;; shift instructions and a scratch register.