From: Richard Biener
Date: Wed, 4 May 2016 12:37:56 +0000 (+0000)
Subject: match.pd: Add BIT_FIELD_REF canonicalizations and vector constructor
 simplifications.
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=d3e40b7647aa1bb68bf1299e8a72bcb38af16cfb;p=gcc.git

match.pd: Add BIT_FIELD_REF canonicalizations and vector constructor simplifications.

2016-05-04  Richard Biener

	* match.pd: Add BIT_FIELD_REF canonicalizations and vector
	constructor simplifications.
	* fold-const.c (fold_ternary_loc): Remove duplicate functionality
	here.

From-SVN: r235871
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 272b1ed1cc2..623b269d3e0 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,10 @@
+2016-05-04  Richard Biener
+
+	* match.pd: Add BIT_FIELD_REF canonicalizations and vector
+	constructor simplifications.
+	* fold-const.c (fold_ternary_loc): Remove duplicate functionality
+	here.
+
 2016-05-04  Oleg Endo
 
 	* config/sh/predicates (post_inc_mem, pre_dec_mem): New predicates.
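For illustration only (not part of the commit): the new match.pd rules rewrite
a BIT_FIELD_REF of the first half of a complex value to REALPART_EXPR and of
the second half to IMAGPART_EXPR.  This relies on the storage layout of
_Complex, real part first; the small standalone C program below (hypothetical,
names mine) merely demonstrates that layout assumption.

  #include <complex.h>
  #include <stdio.h>
  #include <string.h>

  int
  main (void)
  {
    float _Complex c = 1.0f + 2.0f * I;
    float parts[2];
    /* Real part stored first, imaginary part second: this is what
       lets BIT_FIELD_REF <c, 32, 0> stand for __real__ c and
       BIT_FIELD_REF <c, 32, 32> for __imag__ c.  */
    memcpy (parts, &c, sizeof parts);
    printf ("%g %g\n", parts[0], parts[1]);   /* prints: 1 2 */
    return 0;
  }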
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 0004f789d53..416ec5d7a84 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -11719,9 +11719,7 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
       gcc_unreachable ();
 
     case BIT_FIELD_REF:
-      if ((TREE_CODE (arg0) == VECTOR_CST
-           || (TREE_CODE (arg0) == CONSTRUCTOR
-               && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
+      if (TREE_CODE (arg0) == VECTOR_CST
           && (type == TREE_TYPE (TREE_TYPE (arg0))
               || (TREE_CODE (type) == VECTOR_TYPE
                   && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
@@ -11749,88 +11747,32 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
                     vals[i] = VECTOR_CST_ELT (arg0, idx + i);
                   return build_vector (type, vals);
                 }
-
-              /* Constructor elements can be subvectors.  */
-              unsigned HOST_WIDE_INT k = 1;
-              if (CONSTRUCTOR_NELTS (arg0) != 0)
-                {
-                  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
-                  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
-                    k = TYPE_VECTOR_SUBPARTS (cons_elem);
-                }
-
-              /* We keep an exact subset of the constructor elements.  */
-              if ((idx % k) == 0 && (n % k) == 0)
-                {
-                  if (CONSTRUCTOR_NELTS (arg0) == 0)
-                    return build_constructor (type, NULL);
-                  idx /= k;
-                  n /= k;
-                  if (n == 1)
-                    {
-                      if (idx < CONSTRUCTOR_NELTS (arg0))
-                        return CONSTRUCTOR_ELT (arg0, idx)->value;
-                      return build_zero_cst (type);
-                    }
-
-                  vec<constructor_elt, va_gc> *vals;
-                  vec_alloc (vals, n);
-                  for (unsigned i = 0;
-                       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
-                       ++i)
-                    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
-                                            CONSTRUCTOR_ELT
-                                              (arg0, idx + i)->value);
-                  return build_constructor (type, vals);
-                }
-              /* The bitfield references a single constructor element.  */
-              else if (idx + n <= (idx / k + 1) * k)
-                {
-                  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
-                    return build_zero_cst (type);
-                  else if (n == k)
-                    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
-                  else
-                    return fold_build3_loc (loc, code, type,
-                      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
-                      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
-                }
             }
         }
 
-      /* A bit-field-ref that referenced the full argument can be stripped.  */
-      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
-          && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
-          && integer_zerop (op2))
-        return fold_convert_loc (loc, type, arg0);
-
       /* On constants we can use native encode/interpret to constant
          fold (nearly) all BIT_FIELD_REFs.  */
       if (CONSTANT_CLASS_P (arg0)
           && can_native_interpret_type_p (type)
-          && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
-          /* This limitation should not be necessary, we just need to
-             round this up to mode size.  */
-          && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
-          /* Need bit-shifting of the buffer to relax the following.  */
-          && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
+          && BITS_PER_UNIT == 8)
         {
           unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
           unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
-          unsigned HOST_WIDE_INT clen;
-          clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
-          /* ???  We cannot tell native_encode_expr to start at
-             some random byte only.  So limit us to a reasonable amount
-             of work.  */
-          if (clen <= 4096)
+          /* Limit us to a reasonable amount of work.  To relax the
+             other limitations we need bit-shifting of the buffer
+             and rounding up the size.  */
+          if (bitpos % BITS_PER_UNIT == 0
+              && bitsize % BITS_PER_UNIT == 0
+              && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
             {
-              unsigned char *b = XALLOCAVEC (unsigned char, clen);
-              unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
+              unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
+              unsigned HOST_WIDE_INT len
+                = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
+                                      bitpos / BITS_PER_UNIT);
               if (len > 0
-                  && len * BITS_PER_UNIT >= bitpos + bitsize)
+                  && len * BITS_PER_UNIT >= bitsize)
                 {
-                  tree v = native_interpret_expr (type,
-                                                  b + bitpos / BITS_PER_UNIT,
+                  tree v = native_interpret_expr (type, b,
                                                   bitsize / BITS_PER_UNIT);
                   if (v)
                     return v;
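For illustration only (not part of the commit): the rewritten BIT_FIELD_REF
folding encodes just the referenced byte range of the constant into a
fixed-size stack buffer (bounded by MAX_BITSIZE_MODE_ANY_MODE) and
reinterprets it as the result type, instead of encoding the whole object into
an XALLOCAVEC buffer capped at 4096 bytes.  A rough standalone sketch of the
encode/interpret idea in plain C; this is an analogy, not GCC's
native_encode_expr/native_interpret_expr themselves.

  #include <stdio.h>
  #include <string.h>

  int
  main (void)
  {
    /* Fold the analogue of BIT_FIELD_REF <x, 32, 32> on a constant
       double.  */
    double x = 1.0;
    unsigned char buf[sizeof x];

    /* Stand-in for native_encode_expr: render the constant as bytes.  */
    memcpy (buf, &x, sizeof x);

    /* Stand-in for native_interpret_expr: re-read 32 bits at bit
       position 32 as the result type.  */
    unsigned int hi;
    memcpy (&hi, buf + 4, sizeof hi);

    printf ("%08x\n", hi);   /* 3ff00000 on a little-endian host */
    return 0;
  }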
diff --git a/gcc/match.pd b/gcc/match.pd
index ee2aee37234..55dd23cc670 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -3229,3 +3229,99 @@ DEFINE_INT_AND_FLOAT_ROUND_FN (RINT)
 					WARN_STRICT_OVERFLOW_COMPARISON);
 		    }
 	 (cmp @0 { res; })))))))))
+
+/* Canonicalizations of BIT_FIELD_REFs.  */
+
+(simplify
+ (BIT_FIELD_REF @0 @1 @2)
+ (switch
+  (if (TREE_CODE (TREE_TYPE (@0)) == COMPLEX_TYPE
+       && tree_int_cst_equal (@1, TYPE_SIZE (TREE_TYPE (TREE_TYPE (@0)))))
+   (switch
+    (if (integer_zerop (@2))
+     (view_convert (realpart @0)))
+    (if (tree_int_cst_equal (@2, TYPE_SIZE (TREE_TYPE (TREE_TYPE (@0)))))
+     (view_convert (imagpart @0)))))
+  (if (INTEGRAL_TYPE_P (TREE_TYPE (@0))
+       && INTEGRAL_TYPE_P (type)
+       /* A bit-field-ref that referenced the full argument can be stripped.  */
+       && ((compare_tree_int (@1, TYPE_PRECISION (TREE_TYPE (@0))) == 0
+            && integer_zerop (@2))
+           /* Low-parts can be reduced to integral conversions.
+              ???  The following doesn't work for PDP endian.  */
+           || (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
+               /* Don't even think about BITS_BIG_ENDIAN.  */
+               && TYPE_PRECISION (TREE_TYPE (@0)) % BITS_PER_UNIT == 0
+               && TYPE_PRECISION (type) % BITS_PER_UNIT == 0
+               && compare_tree_int (@2, (BYTES_BIG_ENDIAN
+                                         ? (TYPE_PRECISION (TREE_TYPE (@0))
+                                            - TYPE_PRECISION (type))
+                                         : 0)) == 0)))
+   (convert @0))))
+
+/* Simplify vector extracts.  */
+
+(simplify
+ (BIT_FIELD_REF CONSTRUCTOR@0 @1 @2)
+ (if (VECTOR_TYPE_P (TREE_TYPE (@0))
+      && (types_match (type, TREE_TYPE (TREE_TYPE (@0)))
+          || (VECTOR_TYPE_P (type)
+              && types_match (TREE_TYPE (type), TREE_TYPE (TREE_TYPE (@0))))))
+  (with
+   {
+     tree ctor = (TREE_CODE (@0) == SSA_NAME
+                  ? gimple_assign_rhs1 (SSA_NAME_DEF_STMT (@0)) : @0);
+     tree eltype = TREE_TYPE (TREE_TYPE (ctor));
+     unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
+     unsigned HOST_WIDE_INT n = tree_to_uhwi (@1);
+     unsigned HOST_WIDE_INT idx = tree_to_uhwi (@2);
+   }
+   (if (n != 0
+        && (idx % width) == 0
+        && (n % width) == 0
+        && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (ctor)))
+    (with
+     {
+       idx = idx / width;
+       n = n / width;
+       /* Constructor elements can be subvectors.  */
+       unsigned HOST_WIDE_INT k = 1;
+       if (CONSTRUCTOR_NELTS (ctor) != 0)
+         {
+           tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (ctor, 0)->value);
+           if (TREE_CODE (cons_elem) == VECTOR_TYPE)
+             k = TYPE_VECTOR_SUBPARTS (cons_elem);
+         }
+     }
+     (switch
+      /* We keep an exact subset of the constructor elements.  */
+      (if ((idx % k) == 0 && (n % k) == 0)
+       (if (CONSTRUCTOR_NELTS (ctor) == 0)
+        { build_constructor (type, NULL); }
+        (with
+         {
+           idx /= k;
+           n /= k;
+         }
+         (if (n == 1)
+          (if (idx < CONSTRUCTOR_NELTS (ctor))
+           { CONSTRUCTOR_ELT (ctor, idx)->value; }
+           { build_zero_cst (type); })
+          {
+            vec<constructor_elt, va_gc> *vals;
+            vec_alloc (vals, n);
+            for (unsigned i = 0;
+                 i < n && idx + i < CONSTRUCTOR_NELTS (ctor); ++i)
+              CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
+                                      CONSTRUCTOR_ELT (ctor, idx + i)->value);
+            build_constructor (type, vals);
+          }))))
+      /* The bitfield references a single constructor element.  */
+      (if (idx + n <= (idx / k + 1) * k)
+       (switch
+        (if (CONSTRUCTOR_NELTS (ctor) <= idx / k)
+         { build_zero_cst (type); })
+        (if (n == k)
+         { CONSTRUCTOR_ELT (ctor, idx / k)->value; })
+        (BIT_FIELD_REF { CONSTRUCTOR_ELT (ctor, idx / k)->value; }
+                       @1 { bitsize_int ((idx % k) * width); })))))))))
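For illustration only (not part of the commit): a testcase-style sketch of the
simplest case the new vector-extract rule handles; the SSA names in the
comment are hypothetical.

  typedef int v4si __attribute__ ((vector_size (16)));

  int
  second_elt (int a, int b)
  {
    v4si v = { a, b, a, b };
    /* This read appears in GIMPLE roughly as
         _1 = BIT_FIELD_REF <v_2, 32, 32>;
       with v_2 defined by the CONSTRUCTOR { a, b, a, b }.  The rule
       looks through the SSA definition and, in the n == 1, k == 1
       case, should simplify the whole extract to plain 'b'.  */
    return v[1];
  }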