From: Daniel Berlin Date: Sat, 1 Dec 2001 01:59:02 +0000 (+0000) Subject: rs6000.c (altivec_expand_builtin): add ALTIVEC_BUILTIN_LD_INTERNAL_4sf and ALTIVEC_BU... X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=f18c054f039f8d5642a4bfb89db758f3abf3acfd;p=gcc.git rs6000.c (altivec_expand_builtin): add ALTIVEC_BUILTIN_LD_INTERNAL_4sf and ALTIVEC_BUILTIN_ST_INTERNAL_4sf... 2001-11-30 Daniel Berlin * config/rs6000/rs6000.c (altivec_expand_builtin): add ALTIVEC_BUILTIN_LD_INTERNAL_4sf and ALTIVEC_BUILTIN_ST_INTERNAL_4sf, *_16qi,_8hi, rename existing V4SI ones to *_4si. (altivec_init_builtins): Ditto. (bdesc_2arg): Rename CODE_FOR_* to match changes in MD file. * config/rs6000/rs6000.md: Add attribute types vecsimple, veccomplex, vecfloat, and vecperm, for altivec instructions. Modify altivec patterns to use approriate attribute type. Modify altivec patterns to match RTL operations where approriate (IE no unspec where we can avoid it). Add vector unit scheduling for ppc7450. Rename patterns to what they are where approriate (altivec_vaddfp->addv4sf3, etc) * config/rs6000/rs6000.h (enum rs6000_builtins): Change VRS->VSR. Pass -mppc, and define _ARCH_PPC, if -mcpu=7450 is used. * config/rs6000/sysv4.h: Add -mcpu=7450. * testsuite/gcc.dg/altivec-1.c: Update test to take into account renamed _builtin_altivec_ld_interal function. From-SVN: r47502 --- diff --git a/gcc/ChangeLog b/gcc/ChangeLog index 5ffe4bde91c..71bacf59bfe 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,3 +1,28 @@ +2001-11-30 Daniel Berlin + + * config/rs6000/rs6000.c (altivec_expand_builtin): add + ALTIVEC_BUILTIN_LD_INTERNAL_4sf and ALTIVEC_BUILTIN_ST_INTERNAL_4sf, + *_16qi,_8hi, rename existing V4SI ones to *_4si. + (altivec_init_builtins): Ditto. + (bdesc_2arg): Rename CODE_FOR_* to match changes in MD file. + + * config/rs6000/rs6000.md: Add attribute types vecsimple, + veccomplex, vecfloat, and vecperm, for altivec instructions. + Modify altivec patterns to use approriate attribute type. 
+ Modify altivec patterns to match RTL operations where approriate + (IE no unspec where we can avoid it). + Add vector unit scheduling for ppc7450. + Rename patterns to what they are where approriate + (altivec_vaddfp->addv4sf3, etc) + + * config/rs6000/rs6000.h (enum rs6000_builtins): Change VRS->VSR. + Pass -mppc, and define _ARCH_PPC, if -mcpu=7450 is used. + + * config/rs6000/sysv4.h: Add -mcpu=7450. + + * testsuite/gcc.dg/altivec-1.c: Update test to take into account renamed + _builtin_altivec_ld_interal function. + 2001-11-30 Kaveh R. Ghazi * configure.in (AC_CHECK_FUNCS): Delete strtoul, bsearch, popen, diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c index ffeae83af9a..2f1b0a162ae 100644 --- a/gcc/config/rs6000/rs6000.c +++ b/gcc/config/rs6000/rs6000.c @@ -2976,10 +2976,10 @@ struct builtin_description /* Simple binary operatiors: VECc = foo (VECa, VECb). */ static const struct builtin_description bdesc_2arg[] = { - { MASK_ALTIVEC, CODE_FOR_altivec_vaddubm, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM }, - { MASK_ALTIVEC, CODE_FOR_altivec_vadduhm, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM }, - { MASK_ALTIVEC, CODE_FOR_altivec_vadduwm, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM }, - { MASK_ALTIVEC, CODE_FOR_altivec_vaddfp, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP }, + { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM }, + { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM }, + { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM }, + { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP }, { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW }, { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS }, { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", 
ALTIVEC_BUILTIN_VADDSBS }, @@ -2987,7 +2987,7 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS }, { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS }, { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS }, - { MASK_ALTIVEC, CODE_FOR_altivec_vand, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND }, + { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND }, { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC }, { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB }, { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB }, @@ -3008,26 +3008,26 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW }, { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW }, { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxub, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxsb, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxuh, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxsh, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxuw, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxsw, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW }, - { MASK_ALTIVEC, CODE_FOR_altivec_vmaxfp, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP }, + { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", 
ALTIVEC_BUILTIN_VMAXUB }, + { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB }, + { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH }, + { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH }, + { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW }, + { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW }, + { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP }, { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB }, { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH }, { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW }, { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB }, { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH }, { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminub, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminsb, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminuh, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminsh, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminuw, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminsw, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW }, - { MASK_ALTIVEC, CODE_FOR_altivec_vminfp, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP }, + { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB }, + { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", 
ALTIVEC_BUILTIN_VMINSB }, + { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH }, + { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH }, + { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW }, + { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW }, + { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP }, { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB }, { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB }, { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH }, @@ -3037,7 +3037,7 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH }, { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH }, { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR }, - { MASK_ALTIVEC, CODE_FOR_altivec_vor, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR }, + { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR }, { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM }, { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM }, { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX }, @@ -3058,17 +3058,17 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL }, { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO }, { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB }, - { MASK_ALTIVEC, CODE_FOR_altivec_vrsh, 
"__builtin_altivec_vrsh", ALTIVEC_BUILTIN_VRSH }, - { MASK_ALTIVEC, CODE_FOR_altivec_vrsw, "__builtin_altivec_vrsw", ALTIVEC_BUILTIN_VRSW }, + { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH }, + { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW }, { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB }, { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH }, { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW }, { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR }, { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO }, - { MASK_ALTIVEC, CODE_FOR_altivec_vsububm, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM }, - { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhm, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM }, - { MASK_ALTIVEC, CODE_FOR_altivec_vsubuwm, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM }, - { MASK_ALTIVEC, CODE_FOR_altivec_vsubfp, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP }, + { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM }, + { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM }, + { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM }, + { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP }, { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW }, { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS }, { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS }, @@ -3081,7 +3081,7 @@ static const struct builtin_description bdesc_2arg[] = { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", 
ALTIVEC_BUILTIN_VSUM4SHS }, { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS }, { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS }, - { MASK_ALTIVEC, CODE_FOR_altivec_vxor, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR }, + { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR }, }; static rtx @@ -3134,8 +3134,8 @@ altivec_expand_builtin (exp, target) switch (fcode) { - case ALTIVEC_BUILTIN_LD_INTERNAL: - icode = CODE_FOR_altivec_lvx; + case ALTIVEC_BUILTIN_LD_INTERNAL_16qi: + icode = CODE_FOR_altivec_lvx_16qi; arg0 = TREE_VALUE (arglist); op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); tmode = insn_data[icode].operand[0].mode; @@ -3154,9 +3154,126 @@ altivec_expand_builtin (exp, target) return 0; emit_insn (pat); return target; + case ALTIVEC_BUILTIN_LD_INTERNAL_8hi: + icode = CODE_FOR_altivec_lvx_8hi; + arg0 = TREE_VALUE (arglist); + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + tmode = insn_data[icode].operand[0].mode; + mode0 = insn_data[icode].operand[1].mode; - case ALTIVEC_BUILTIN_ST_INTERNAL: - icode = CODE_FOR_altivec_stvx; + if (! target + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! (*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0)); + + pat = GEN_FCN (icode) (target, op0); + if (! pat) + return 0; + emit_insn (pat); + return target; + case ALTIVEC_BUILTIN_LD_INTERNAL_4si: + icode = CODE_FOR_altivec_lvx_4si; + arg0 = TREE_VALUE (arglist); + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + tmode = insn_data[icode].operand[0].mode; + mode0 = insn_data[icode].operand[1].mode; + + if (! target + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! 
(*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0)); + + pat = GEN_FCN (icode) (target, op0); + if (! pat) + return 0; + emit_insn (pat); + return target; + case ALTIVEC_BUILTIN_LD_INTERNAL_4sf: + icode = CODE_FOR_altivec_lvx_4sf; + arg0 = TREE_VALUE (arglist); + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + tmode = insn_data[icode].operand[0].mode; + mode0 = insn_data[icode].operand[1].mode; + + if (! target + || GET_MODE (target) != tmode + || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) + target = gen_reg_rtx (tmode); + + if (! (*insn_data[icode].operand[1].predicate) (op0, mode0)) + op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0)); + + pat = GEN_FCN (icode) (target, op0); + if (! pat) + return 0; + emit_insn (pat); + return target; + + case ALTIVEC_BUILTIN_ST_INTERNAL_16qi: + icode = CODE_FOR_altivec_stvx_16qi; + arg0 = TREE_VALUE (arglist); + arg1 = TREE_VALUE (TREE_CHAIN (arglist)); + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); + mode0 = insn_data[icode].operand[0].mode; + mode1 = insn_data[icode].operand[1].mode; + + if (! (*insn_data[icode].operand[0].predicate) (op0, mode0)) + op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0)); + if (! (*insn_data[icode].operand[1].predicate) (op1, mode1)) + op1 = copy_to_mode_reg (mode1, op1); + + pat = GEN_FCN (icode) (op0, op1); + if (! pat) + return 0; + emit_insn (pat); + return NULL_RTX; + case ALTIVEC_BUILTIN_ST_INTERNAL_8hi: + icode = CODE_FOR_altivec_stvx_8hi; + arg0 = TREE_VALUE (arglist); + arg1 = TREE_VALUE (TREE_CHAIN (arglist)); + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); + mode0 = insn_data[icode].operand[0].mode; + mode1 = insn_data[icode].operand[1].mode; + + if (! (*insn_data[icode].operand[0].predicate) (op0, mode0)) + op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0)); + if (! 
(*insn_data[icode].operand[1].predicate) (op1, mode1)) + op1 = copy_to_mode_reg (mode1, op1); + + pat = GEN_FCN (icode) (op0, op1); + if (! pat) + return 0; + emit_insn (pat); + return NULL_RTX; + case ALTIVEC_BUILTIN_ST_INTERNAL_4si: + icode = CODE_FOR_altivec_stvx_4si; + arg0 = TREE_VALUE (arglist); + arg1 = TREE_VALUE (TREE_CHAIN (arglist)); + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); + mode0 = insn_data[icode].operand[0].mode; + mode1 = insn_data[icode].operand[1].mode; + + if (! (*insn_data[icode].operand[0].predicate) (op0, mode0)) + op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0)); + if (! (*insn_data[icode].operand[1].predicate) (op1, mode1)) + op1 = copy_to_mode_reg (mode1, op1); + + pat = GEN_FCN (icode) (op0, op1); + if (! pat) + return 0; + emit_insn (pat); + return NULL_RTX; + case ALTIVEC_BUILTIN_ST_INTERNAL_4sf: + icode = CODE_FOR_altivec_stvx_4sf; arg0 = TREE_VALUE (arglist); arg1 = TREE_VALUE (TREE_CHAIN (arglist)); op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0); @@ -3223,11 +3340,26 @@ altivec_init_builtins (void) tree endlink = void_list_node; tree pint_type_node = build_pointer_type (integer_type_node); + tree pshort_type_node = build_pointer_type (short_integer_type_node); + tree pchar_type_node = build_pointer_type (char_type_node); + tree pfloat_type_node = build_pointer_type (float_type_node); /* V4SI foo (int *). */ tree v4si_ftype_pint = build_function_type (V4SI_type_node, tree_cons (NULL_TREE, pint_type_node, endlink)); + /* V8HI foo (short *). */ + tree v8hi_ftype_pshort + = build_function_type (V8HI_type_node, + tree_cons (NULL_TREE, pshort_type_node, endlink)); + /* V16QI foo (char *). */ + tree v16qi_ftype_pchar + = build_function_type (V16QI_type_node, + tree_cons (NULL_TREE, pchar_type_node, endlink)); + /* V4SF foo (float *). 
*/ + tree v4sf_ftype_pfloat + = build_function_type (V4SF_type_node, + tree_cons (NULL_TREE, pfloat_type_node, endlink)); /* void foo (int *, V4SI). */ tree void_ftype_pint_v4si @@ -3235,6 +3367,24 @@ altivec_init_builtins (void) tree_cons (NULL_TREE, pint_type_node, tree_cons (NULL_TREE, V4SI_type_node, endlink))); + /* void foo (short *, V8HI). */ + tree void_ftype_pshort_v8hi + = build_function_type (void_type_node, + tree_cons (NULL_TREE, pshort_type_node, + tree_cons (NULL_TREE, V8HI_type_node, + endlink))); + /* void foo (char *, V16QI). */ + tree void_ftype_pchar_v16qi + = build_function_type (void_type_node, + tree_cons (NULL_TREE, pchar_type_node, + tree_cons (NULL_TREE, V16QI_type_node, + endlink))); + /* void foo (float *, V4SF). */ + tree void_ftype_pfloat_v4sf + = build_function_type (void_type_node, + tree_cons (NULL_TREE, pfloat_type_node, + tree_cons (NULL_TREE, V4SF_type_node, + endlink))); tree v4si_ftype_v4si_v4si = build_function_type (V4SI_type_node, @@ -3326,8 +3476,14 @@ altivec_init_builtins (void) tree_cons (NULL_TREE, V8HI_type_node, endlink))); - def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL); - def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", 
void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi); + def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi); /* Add the simple binary operators. */ d = (struct builtin_description *) bdesc_2arg; diff --git a/gcc/config/rs6000/rs6000.h b/gcc/config/rs6000/rs6000.h index 11747ef0bef..e81069affb7 100644 --- a/gcc/config/rs6000/rs6000.h +++ b/gcc/config/rs6000/rs6000.h @@ -76,6 +76,7 @@ Boston, MA 02111-1307, USA. */ %{mcpu=604e: -D_ARCH_PPC} \ %{mcpu=620: -D_ARCH_PPC} \ %{mcpu=740: -D_ARCH_PPC} \ +%{mcpu=7450: -D_ARCH_PPC} \ %{mcpu=750: -D_ARCH_PPC} \ %{mcpu=801: -D_ARCH_PPC} \ %{mcpu=821: -D_ARCH_PPC} \ @@ -113,6 +114,7 @@ Boston, MA 02111-1307, USA. */ %{mcpu=604e: -mppc} \ %{mcpu=620: -mppc} \ %{mcpu=740: -mppc} \ +%{mcpu=7450: -mppc} \ %{mcpu=750: -mppc} \ %{mcpu=801: -mppc} \ %{mcpu=821: -mppc} \ @@ -293,9 +295,9 @@ extern int target_flags; N_("Use PowerPC-64 instruction set")}, \ {"no-powerpc64", - MASK_POWERPC64, \ N_("Don't use PowerPC-64 instruction set")}, \ - {"altivec", MASK_ALTIVEC, \ + {"altivec", MASK_ALTIVEC , \ N_("Use AltiVec instructions.")}, \ - {"no-altivec", - MASK_ALTIVEC, \ + {"no-altivec", - MASK_ALTIVEC , \ N_("Don't use AltiVec instructions.")}, \ {"new-mnemonics", MASK_NEW_MNEMONICS, \ N_("Use new mnemonics for PowerPC architecture")},\ @@ -2852,8 +2854,14 @@ extern int frame_pointer_needed; enum rs6000_builtins { /* AltiVec builtins. 
*/ - ALTIVEC_BUILTIN_ST_INTERNAL, - ALTIVEC_BUILTIN_LD_INTERNAL, + ALTIVEC_BUILTIN_ST_INTERNAL_4si, + ALTIVEC_BUILTIN_LD_INTERNAL_4si, + ALTIVEC_BUILTIN_ST_INTERNAL_8hi, + ALTIVEC_BUILTIN_LD_INTERNAL_8hi, + ALTIVEC_BUILTIN_ST_INTERNAL_16qi, + ALTIVEC_BUILTIN_LD_INTERNAL_16qi, + ALTIVEC_BUILTIN_ST_INTERNAL_4sf, + ALTIVEC_BUILTIN_LD_INTERNAL_4sf, ALTIVEC_BUILTIN_VADDUBM, ALTIVEC_BUILTIN_VADDUHM, ALTIVEC_BUILTIN_VADDUWM, @@ -2936,8 +2944,8 @@ enum rs6000_builtins ALTIVEC_BUILTIN_VSL, ALTIVEC_BUILTIN_VSLO, ALTIVEC_BUILTIN_VSRB, - ALTIVEC_BUILTIN_VRSH, - ALTIVEC_BUILTIN_VRSW, + ALTIVEC_BUILTIN_VSRH, + ALTIVEC_BUILTIN_VSRW, ALTIVEC_BUILTIN_VSRAB, ALTIVEC_BUILTIN_VSRAH, ALTIVEC_BUILTIN_VSRAW, diff --git a/gcc/config/rs6000/rs6000.md b/gcc/config/rs6000/rs6000.md index ef50cd17051..a8a5a95a78f 100644 --- a/gcc/config/rs6000/rs6000.md +++ b/gcc/config/rs6000/rs6000.md @@ -37,7 +37,7 @@ ;; Define an insn type attribute. This is used in function unit delay ;; computations. -(define_attr "type" "integer,load,store,fpload,fpstore,imul,lmul,idiv,ldiv,branch,compare,cr_logical,delayed_compare,fpcompare,mtjmpr,fp,dmul,sdiv,ddiv,ssqrt,dsqrt,jmpreg,altivec" +(define_attr "type" "integer,load,store,fpload,fpstore,vecload,vecstore,imul,lmul,idiv,ldiv,branch,compare,cr_logical,delayed_compare,fpcompare,mtjmpr,fp,dmul,sdiv,ddiv,ssqrt,dsqrt,jmpreg,vecsimple,veccomplex,veccmp,vecperm,vecfloat,altivec" (const_string "integer")) ;; Length (in bytes). 
@@ -70,7 +70,7 @@ 2 1) (define_function_unit "lsu" 1 0 - (and (eq_attr "type" "load") + (and (eq_attr "type" "load,vecload") (eq_attr "cpu" "ppc7450")) 3 1) @@ -85,7 +85,7 @@ 2 1) (define_function_unit "lsu" 1 0 - (and (eq_attr "type" "store") + (and (eq_attr "type" "store,vecstore") (eq_attr "cpu" "ppc7450")) 3 1) @@ -317,6 +317,26 @@ (and (eq_attr "type" "cr_logical") (eq_attr "cpu" "ppc7450")) 1 1) +(define_function_unit "viu1" 1 0 + (and (eq_attr "type" "vecsimple") + (eq_attr "cpu" "ppc7450")) + 1 1) +(define_function_unit "viu2" 1 0 + (and (eq_attr "type" "veccomplex") + (eq_attr "cpu" "ppc7450")) + 4 1) +(define_function_unit "vfpu" 1 0 + (and (eq_attr "type" "veccmp") + (eq_attr "cpu" "ppc7450")) + 2 1) +(define_function_unit "vfpu" 1 0 + (and (eq_attr "type" "vecfloat") + (eq_attr "cpu" "ppc7450")) + 4 1) +(define_function_unit "vpu" 1 0 + (and (eq_attr "type" "vecperm") + (eq_attr "cpu" "ppc7450")) + 2 1) ; PPC750 has two integer units: a primary one which can perform all ; operations and a secondary one which is fed in lock step with the first @@ -13559,20 +13579,62 @@ ;; AltiVec patterns ;; Generic LVX load instruction. 
-(define_insn "altivec_lvx" +(define_insn "altivec_lvx_4si" [(set (match_operand:V4SI 0 "register_operand" "=v") (match_operand:V4SI 1 "memory_operand" "m"))] "TARGET_ALTIVEC" "lvx %0,%y1" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecload")]) + +(define_insn "altivec_lvx_8hi" + [(set (match_operand:V8HI 0 "register_operand" "=v") + (match_operand:V8HI 1 "memory_operand" "m"))] + "TARGET_ALTIVEC" + "lvx %0,%y1" + [(set_attr "type" "vecload")]) + +(define_insn "altivec_lvx_16qi" + [(set (match_operand:V16QI 0 "register_operand" "=v") + (match_operand:V16QI 1 "memory_operand" "m"))] + "TARGET_ALTIVEC" + "lvx %0,%y1" + [(set_attr "type" "vecload")]) + +(define_insn "altivec_lvx_4sf" + [(set (match_operand:V4SF 0 "register_operand" "=v") + (match_operand:V4SF 1 "memory_operand" "m"))] + "TARGET_ALTIVEC" + "lvx %0,%y1" + [(set_attr "type" "vecload")]) ;; Generic STVX store instruction. -(define_insn "altivec_stvx" +(define_insn "altivec_stvx_4si" [(set (match_operand:V4SI 0 "memory_operand" "=m") (match_operand:V4SI 1 "register_operand" "v"))] "TARGET_ALTIVEC" "stvx %1,%y0" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecstore")]) + +(define_insn "altivec_stvx_8hi" + [(set (match_operand:V8HI 0 "memory_operand" "=m") + (match_operand:V8HI 1 "register_operand" "v"))] + "TARGET_ALTIVEC" + "stvx %1,%y0" + [(set_attr "type" "vecstore")]) + +(define_insn "altivec_stvx_16qi" + [(set (match_operand:V16QI 0 "memory_operand" "=m") + (match_operand:V16QI 1 "register_operand" "v"))] + "TARGET_ALTIVEC" + "stvx %1,%y0" + [(set_attr "type" "vecstore")]) + +(define_insn "altivec_stvx_4sf" + [(set (match_operand:V4SF 0 "memory_operand" "=m") + (match_operand:V4SF 1 "register_operand" "v"))] + "TARGET_ALTIVEC" + "stvx %1,%y0" + [(set_attr "type" "vecstore")]) ;; Vector move instructions. (define_expand "movv4si" @@ -13666,37 +13728,37 @@ ;; Simple binary operations. 
-(define_insn "altivec_vaddubm" +(define_insn "addv16qi3" [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 31))] + (plus:V16QI (match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vaddubm %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vadduhm" +(define_insn "addv8hi3" [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 32))] + (plus:V8HI (match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vadduhm %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vadduwm" +(define_insn "addv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 33))] + (plus:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vadduwm %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vaddfp" +(define_insn "addv4sf3" [(set (match_operand:V4SF 0 "register_operand" "=v") - (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 34))] + (plus:V4SF (match_operand:V4SF 1 "register_operand" "v") + (match_operand:V4SF 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vaddfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecfloat")]) (define_insn "altivec_vaddcuw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13704,7 +13766,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 35))] "TARGET_ALTIVEC" "vaddcuw %0,%1,%2" - [(set_attr "type" "altivec")]) 
+ [(set_attr "type" "vecsimple")]) (define_insn "altivec_vaddubs" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13712,7 +13774,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 36))] "TARGET_ALTIVEC" "vaddubs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vaddsbs" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13720,7 +13782,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 37))] "TARGET_ALTIVEC" "vaddsbs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vadduhs" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13728,7 +13790,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 38))] "TARGET_ALTIVEC" "vadduhs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vaddshs" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13736,7 +13798,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 39))] "TARGET_ALTIVEC" "vaddshs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vadduws" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13744,7 +13806,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 40))] "TARGET_ALTIVEC" "vadduws %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vaddsws" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13752,15 +13814,15 @@ (match_operand:V4SI 2 "register_operand" "v")] 41))] "TARGET_ALTIVEC" "vaddsws %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vand" +(define_insn "andv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 42))] + (and:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vand %0,%1,%2" - 
[(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vandc" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13768,7 +13830,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 43))] "TARGET_ALTIVEC" "vandc %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vavgub" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13776,7 +13838,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 44))] "TARGET_ALTIVEC" "vavgub %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vavgsb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13784,7 +13846,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 45))] "TARGET_ALTIVEC" "vavgsb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vavguh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13792,7 +13854,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 46))] "TARGET_ALTIVEC" "vavguh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vavgsh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13800,7 +13862,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 47))] "TARGET_ALTIVEC" "vavgsh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vavguw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13808,7 +13870,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 48))] "TARGET_ALTIVEC" "vavguw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vavgsw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13816,7 +13878,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 49))] "TARGET_ALTIVEC" "vavgsw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpbfp" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ 
-13824,7 +13886,7 @@ (match_operand:V4SF 2 "register_operand" "v")] 50))] "TARGET_ALTIVEC" "vcmpbfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccmp")]) (define_insn "altivec_vcmpequb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13832,7 +13894,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 51))] "TARGET_ALTIVEC" "vcmpequb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpequh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13840,7 +13902,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 52))] "TARGET_ALTIVEC" "vcmpequh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpequw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13848,7 +13910,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 53))] "TARGET_ALTIVEC" "vcmpequw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpeqfp" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13856,7 +13918,7 @@ (match_operand:V4SF 2 "register_operand" "v")] 54))] "TARGET_ALTIVEC" "vcmpeqfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccmp")]) (define_insn "altivec_vcmpgefp" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13864,7 +13926,7 @@ (match_operand:V4SF 2 "register_operand" "v")] 55))] "TARGET_ALTIVEC" "vcmpgefp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccmp")]) (define_insn "altivec_vcmpgtub" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13872,7 +13934,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 56))] "TARGET_ALTIVEC" "vcmpgtub %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpgtsb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13880,7 +13942,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 57))] "TARGET_ALTIVEC" "vcmpgtsb %0,%1,%2" - [(set_attr "type" 
"altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpgtuh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13888,7 +13950,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 58))] "TARGET_ALTIVEC" "vcmpgtuh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpgtsh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13896,7 +13958,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 59))] "TARGET_ALTIVEC" "vcmpgtsh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpgtuw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13904,7 +13966,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 60))] "TARGET_ALTIVEC" "vcmpgtuw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpgtsw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13912,7 +13974,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 61))] "TARGET_ALTIVEC" "vcmpgtsw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vcmpgtfp" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -13920,63 +13982,63 @@ (match_operand:V4SF 2 "register_operand" "v")] 62))] "TARGET_ALTIVEC" "vcmpgtfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccmp")]) -(define_insn "altivec_vmaxub" +(define_insn "umaxv16qi3" [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 63))] + (umax:V16QI (match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxub %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vmaxsb" +(define_insn "smaxv16qi3" [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:V16QI 1 
"register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 64))] + (smax:V16QI (match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxsb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vmaxuh" +(define_insn "umaxv8hi3" [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 65))] + (umax:V8HI (match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxuh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vmaxsh" +(define_insn "smaxv8hi3" [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 66))] + (smax:V8HI (match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxsh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vmaxuw" +(define_insn "umaxv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 67))] + (umax:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxuw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vmaxsw" +(define_insn "smaxv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 68))] + (smax:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxsw %0,%1,%2" - 
[(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vmaxfp" +(define_insn "smaxv4sf3" [(set (match_operand:V4SF 0 "register_operand" "=v") - (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 69))] + (smax:V4SF (match_operand:V4SF 1 "register_operand" "v") + (match_operand:V4SF 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vmaxfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccmp")]) (define_insn "altivec_vmrghb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -13984,7 +14046,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 70))] "TARGET_ALTIVEC" "vmrghb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vmrghh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -13992,7 +14054,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 71))] "TARGET_ALTIVEC" "vmrghh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vmrghw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14000,7 +14062,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 72))] "TARGET_ALTIVEC" "vmrghw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vmrglb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14008,7 +14070,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 73))] "TARGET_ALTIVEC" "vmrglb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vmrglh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14016,7 +14078,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 74))] "TARGET_ALTIVEC" "vmrglh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vmrglw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14024,63 +14086,63 @@ (match_operand:V4SI 2 "register_operand" "v")] 75))] "TARGET_ALTIVEC" "vmrglw 
%0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) -(define_insn "altivec_vminub" +(define_insn "uminv16qi3" [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 76))] + (umin:V16QI (match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminub %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vminsb" +(define_insn "sminv16qi3" [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 77))] + (smin:V16QI (match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminsb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vminuh" +(define_insn "uminv8hi3" [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 78))] + (umin:V8HI (match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminuh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vminsh" +(define_insn "sminv8hi3" [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 79))] + (smin:V8HI (match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminsh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vminuw" +(define_insn "uminv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 
"register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 80))] + (umin:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminuw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vminsw" +(define_insn "sminv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 81))] + (smin:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminsw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vminfp" +(define_insn "sminv4sf3" [(set (match_operand:V4SF 0 "register_operand" "=v") - (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 82))] + (smin:V4SF (match_operand:V4SF 1 "register_operand" "v") + (match_operand:V4SF 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vminfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccmp")]) (define_insn "altivec_vmuleub" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14088,7 +14150,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 83))] "TARGET_ALTIVEC" "vmuleub %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmulesb" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14096,7 +14158,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 84))] "TARGET_ALTIVEC" "vmulesb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmuleuh" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14104,7 +14166,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 85))] "TARGET_ALTIVEC" "vmuleuh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmulesh" 
[(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14112,7 +14174,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 86))] "TARGET_ALTIVEC" "vmulesh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmuloub" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14120,7 +14182,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 87))] "TARGET_ALTIVEC" "vmuloub %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmulosb" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14128,7 +14190,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 88))] "TARGET_ALTIVEC" "vmulosb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmulouh" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14136,7 +14198,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 89))] "TARGET_ALTIVEC" "vmulouh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vmulosh" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14144,23 +14206,23 @@ (match_operand:V8HI 2 "register_operand" "v")] 90))] "TARGET_ALTIVEC" "vmulosh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vnor" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 91))] + (not:V4SI (ior:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v"))))] "TARGET_ALTIVEC" "vnor %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vor" +(define_insn "iorv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 92))] + (ior:V4SI (match_operand:V4SI 1 "register_operand" 
"v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vor %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vpkuhum" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14168,7 +14230,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 93))] "TARGET_ALTIVEC" "vpkuhum %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkuwum" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14176,7 +14238,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 94))] "TARGET_ALTIVEC" "vpkuwum %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkpx" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14184,7 +14246,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 95))] "TARGET_ALTIVEC" "vpkpx %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkuhss" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14192,7 +14254,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 96))] "TARGET_ALTIVEC" "vpkuhss %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkshss" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14200,7 +14262,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 97))] "TARGET_ALTIVEC" "vpkshss %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkuwss" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14208,7 +14270,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 98))] "TARGET_ALTIVEC" "vpkuwss %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkswss" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14216,7 +14278,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 99))] "TARGET_ALTIVEC" "vpkswss %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) 
(define_insn "altivec_vpkuhus" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14224,7 +14286,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 100))] "TARGET_ALTIVEC" "vpkuhus %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkshus" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14232,7 +14294,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 101))] "TARGET_ALTIVEC" "vpkshus %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkuwus" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14240,7 +14302,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 102))] "TARGET_ALTIVEC" "vpkuwus %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vpkswus" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14248,7 +14310,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 103))] "TARGET_ALTIVEC" "vpkswus %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vrlb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14256,7 +14318,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 104))] "TARGET_ALTIVEC" "vrlb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vrlh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14264,7 +14326,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 105))] "TARGET_ALTIVEC" "vrlh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vrlw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14272,7 +14334,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 106))] "TARGET_ALTIVEC" "vrlw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vslb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14280,7 +14342,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 
107))] "TARGET_ALTIVEC" "vslb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vslh" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14288,7 +14350,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 108))] "TARGET_ALTIVEC" "vslh %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vslw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14296,7 +14358,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 109))] "TARGET_ALTIVEC" "vslw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsl" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14304,7 +14366,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 110))] "TARGET_ALTIVEC" "vsl %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vslo" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14312,7 +14374,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 111))] "TARGET_ALTIVEC" "vslo %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vsrb" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14320,23 +14382,23 @@ (match_operand:V16QI 2 "register_operand" "v")] 112))] "TARGET_ALTIVEC" "vsrb %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vrsh" +(define_insn "altivec_vsrh" [(set (match_operand:V8HI 0 "register_operand" "=v") (unspec:V8HI [(match_operand:V8HI 1 "register_operand" "v") (match_operand:V8HI 2 "register_operand" "v")] 113))] "TARGET_ALTIVEC" - "vrsh %0,%1,%2" - [(set_attr "type" "altivec")]) + "vsrh %0,%1,%2" + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vrsw" +(define_insn "altivec_vsrw" [(set (match_operand:V4SI 0 "register_operand" "=v") (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") (match_operand:V4SI 2 "register_operand" "v")] 114))] "TARGET_ALTIVEC" - 
"vrsw %0,%1,%2" - [(set_attr "type" "altivec")]) + "vsrw %0,%1,%2" + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsrab" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14344,7 +14406,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 115))] "TARGET_ALTIVEC" "vsrab %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsrah" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14352,7 +14414,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 116))] "TARGET_ALTIVEC" "vsrah %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsraw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14360,7 +14422,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 117))] "TARGET_ALTIVEC" "vsraw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsr" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14368,7 +14430,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 118))] "TARGET_ALTIVEC" "vsr %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) (define_insn "altivec_vsro" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14376,39 +14438,39 @@ (match_operand:V4SI 2 "register_operand" "v")] 119))] "TARGET_ALTIVEC" "vsro %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecperm")]) -(define_insn "altivec_vsububm" +(define_insn "subv16qi3" [(set (match_operand:V16QI 0 "register_operand" "=v") - (unspec:V16QI [(match_operand:V16QI 1 "register_operand" "v") - (match_operand:V16QI 2 "register_operand" "v")] 120))] + (minus:V16QI (match_operand:V16QI 1 "register_operand" "v") + (match_operand:V16QI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vsububm %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vsubuhm" +(define_insn "subv8hi3" [(set (match_operand:V8HI 0 "register_operand" "=v") - (unspec:V8HI 
[(match_operand:V8HI 1 "register_operand" "v") - (match_operand:V8HI 2 "register_operand" "v")] 121))] + (minus:V8HI (match_operand:V8HI 1 "register_operand" "v") + (match_operand:V8HI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vsubuhm %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vsubuwm" +(define_insn "subv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 122))] + (minus:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vsubuwm %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) -(define_insn "altivec_vsubfp" +(define_insn "subv4sf3" [(set (match_operand:V4SF 0 "register_operand" "=v") - (unspec:V4SF [(match_operand:V4SF 1 "register_operand" "v") - (match_operand:V4SF 2 "register_operand" "v")] 123))] + (minus:V4SF (match_operand:V4SF 1 "register_operand" "v") + (match_operand:V4SF 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vsubfp %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecfloat")]) (define_insn "altivec_vsubcuw" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14416,7 +14478,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 124))] "TARGET_ALTIVEC" "vsubcuw %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsububs" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14424,7 +14486,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 125))] "TARGET_ALTIVEC" "vsububs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsubsbs" [(set (match_operand:V16QI 0 "register_operand" "=v") @@ -14432,7 +14494,7 @@ (match_operand:V16QI 2 "register_operand" "v")] 126))] "TARGET_ALTIVEC" "vsubsbs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" 
"vecsimple")]) (define_insn "altivec_vsubuhs" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14440,7 +14502,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 127))] "TARGET_ALTIVEC" "vsubuhs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsubshs" [(set (match_operand:V8HI 0 "register_operand" "=v") @@ -14448,7 +14510,7 @@ (match_operand:V8HI 2 "register_operand" "v")] 128))] "TARGET_ALTIVEC" "vsubshs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsubuws" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14456,7 +14518,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 129))] "TARGET_ALTIVEC" "vsubuws %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsubsws" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14464,7 +14526,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 130))] "TARGET_ALTIVEC" "vsubsws %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) (define_insn "altivec_vsum4ubs" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14472,7 +14534,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 131))] "TARGET_ALTIVEC" "vsum4ubs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vsum4sbs" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14480,7 +14542,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 132))] "TARGET_ALTIVEC" "vsum4sbs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vsum4shs" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14488,7 +14550,7 @@ (match_operand:V4SI 2 "register_operand" "v")] 133))] "TARGET_ALTIVEC" "vsum4shs %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vsum2sws" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14496,7 +14558,7 @@ 
(match_operand:V4SI 2 "register_operand" "v")] 134))] "TARGET_ALTIVEC" "vsum2sws %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) (define_insn "altivec_vsumsws" [(set (match_operand:V4SI 0 "register_operand" "=v") @@ -14504,12 +14566,12 @@ (match_operand:V4SI 2 "register_operand" "v")] 135))] "TARGET_ALTIVEC" "vsumsws %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "veccomplex")]) -(define_insn "altivec_vxor" +(define_insn "xorv4si3" [(set (match_operand:V4SI 0 "register_operand" "=v") - (unspec:V4SI [(match_operand:V4SI 1 "register_operand" "v") - (match_operand:V4SI 2 "register_operand" "v")] 136))] + (xor:V4SI (match_operand:V4SI 1 "register_operand" "v") + (match_operand:V4SI 2 "register_operand" "v")))] "TARGET_ALTIVEC" "vxor %0,%1,%2" - [(set_attr "type" "altivec")]) + [(set_attr "type" "vecsimple")]) diff --git a/gcc/config/rs6000/sysv4.h b/gcc/config/rs6000/sysv4.h index e5a92ecaaac..91478c1faed 100644 --- a/gcc/config/rs6000/sysv4.h +++ b/gcc/config/rs6000/sysv4.h @@ -1316,6 +1316,7 @@ ncrtn.o%s" %{mcpu=604e: -DCPU=PPC604} \ %{mcpu=620: -DCPU=PPC604} \ %{mcpu=740: -DCPU=PPC603} \ +%{mcpu=7450: -DCPU=PPC603} \ %{mcpu=750: -DCPU=PPC603} \ %{mcpu=801: -DCPU=PPC603} \ %{mcpu=821: -DCPU=PPC603} \ diff --git a/gcc/testsuite/gcc.dg/altivec-1.c b/gcc/testsuite/gcc.dg/altivec-1.c index 874abad7038..8fd40869efd 100644 --- a/gcc/testsuite/gcc.dg/altivec-1.c +++ b/gcc/testsuite/gcc.dg/altivec-1.c @@ -9,10 +9,10 @@ overloaded functions implemented in an . */ #define vec_load(src) \ - __builtin_altivec_ld_internal ((int *) src) + __builtin_altivec_ld_internal_4si ((int *) src) #define vec_store(dst, src) \ - __builtin_altivec_st_internal ((int *) dst, (int4) src) + __builtin_altivec_st_internal_4si ((int *) dst, (int4) src) #define vec_add_int4(x, y) \ __builtin_altivec_vaddsws (x, y)